diff --git a/.github/workflows/configure_release_please.yml b/.github/workflows/configure_release_please.yml new file mode 100644 index 000000000000..3fded0495447 --- /dev/null +++ b/.github/workflows/configure_release_please.yml @@ -0,0 +1,58 @@ +# Copyright 2023 Google LLC + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: A workflow for updating the release please configuration files + +# Controls when the action will run. + +on: + push: + paths: + - '**/gapic_version.py' + branches: + - main + +permissions: + contents: read + +jobs: + build: + permissions: + pull-requests: write # for googleapis/code-suggester + name: Update release please config + runs-on: ubuntu-latest + # don't run the workflow on forks of googleapis/google-cloud-python + if: ${{github.repository == 'googleapis/google-cloud-python'}} + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: Run configure_release_please.py + run: python3 configure_release_please.py + working-directory: ./scripts/configure_release_please + - uses: googleapis/code-suggester@v4 + env: + ACCESS_TOKEN: ${{ secrets.YOSHI_CODE_BOT_TOKEN }} + with: + command: pr + force: true + upstream_owner: googleapis + upstream_repo: google-cloud-python + description: 'Update release-please config files' + title: 'chore: Update release-please config files' + message: 'chore: Update release-please config files' + branch: 
update-release-please-config-files + git_dir: '.' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 9e1a29e079e6..d53fd058f6ec 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: @@ -32,7 +32,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 40b70a4842e5..54cc064cf38a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index acc692633095..5613b2624669 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -36,7 +36,7 @@ jobs: - name: Get current date id: date run: echo "::set-output name=current_date::$(date +'%Y-%m-%d')" - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.9 uses: actions/setup-python@v4 with: diff --git a/.github/workflows/scripts.yml b/.github/workflows/scripts.yml new file mode 100644 index 000000000000..254b7143c986 --- /dev/null +++ b/.github/workflows/scripts.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: scripts + +permissions: + contents: read + +jobs: + test_release_please_scripts: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Install pytest + run: | + python -m pip install pytest + - name: Run configure_release_please_scripts tests + run: | + 
pytest scripts/configure_release_please diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 53e6b4adfdf8..9c2d74c6a9fc 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -15,7 +15,7 @@ jobs: python: ['3.7', '3.8', '3.9', '3.10', "3.11"] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 - name: Setup Python @@ -46,7 +46,7 @@ jobs: python: ['3.11'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: @@ -69,7 +69,7 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: diff --git a/.github/workflows/updatechangelog.yml b/.github/workflows/updatechangelog.yml index ad8adea3470f..4ea8fd3a975c 100644 --- a/.github/workflows/updatechangelog.yml +++ b/.github/workflows/updatechangelog.yml @@ -33,7 +33,7 @@ jobs: # don't run the workflow on forks of googleapis/google-cloud-python if: ${{github.repository == 'googleapis/google-cloud-python'}} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Run bash script to update CHANGELOG.md in the root directory run: ./scripts/updatechangelog.sh - uses: googleapis/code-suggester@v4 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index ac52da16da91..8cf186dc069e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -4,106 +4,114 @@ "packages/google-area120-tables": "0.11.3", "packages/google-cloud-access-approval": "1.11.2", "packages/google-cloud-advisorynotifications": "0.3.0", - "packages/google-cloud-alloydb": "0.3.0", + "packages/google-cloud-alloydb": "0.3.1", "packages/google-cloud-api-gateway": "1.7.2", "packages/google-cloud-api-keys": "0.5.3", "packages/google-cloud-apigee-connect": "1.7.2", "packages/google-cloud-apigee-registry": "0.6.3", 
"packages/google-cloud-appengine-logging": "1.3.1", "packages/google-cloud-artifact-registry": "1.8.3", - "packages/google-cloud-bare-metal-solution": "1.4.2", - "packages/google-cloud-batch": "0.15.0", + "packages/google-cloud-assured-workloads": "1.10.3", + "packages/google-cloud-bare-metal-solution": "1.5.0", + "packages/google-cloud-batch": "0.17.0", "packages/google-cloud-beyondcorp-appconnections": "0.4.3", "packages/google-cloud-beyondcorp-appconnectors": "0.4.3", "packages/google-cloud-beyondcorp-appgateways": "0.4.3", "packages/google-cloud-beyondcorp-clientconnectorservices": "0.4.3", "packages/google-cloud-beyondcorp-clientgateways": "0.4.2", "packages/google-cloud-bigquery-analyticshub": "0.4.2", - "packages/google-cloud-bigquery-biglake": "0.4.1", - "packages/google-cloud-bigquery-data-exchange": "0.5.5", - "packages/google-cloud-bigquery-datapolicies": "0.5.2", - "packages/google-cloud-bigquery-logging": "1.2.2", - "packages/google-cloud-billing": "1.11.2", - "packages/google-cloud-billing-budgets": "1.11.1", - "packages/google-cloud-certificate-manager": "1.4.3", - "packages/google-cloud-channel": "1.15.1", + "packages/google-cloud-bigquery-biglake": "0.4.2", + "packages/google-cloud-bigquery-data-exchange": "0.5.6", + "packages/google-cloud-bigquery-datapolicies": "0.6.1", + "packages/google-cloud-bigquery-logging": "1.3.0", + "packages/google-cloud-bigquery-migration": "0.11.2", + "packages/google-cloud-billing": "1.11.4", + "packages/google-cloud-billing-budgets": "1.12.1", + "packages/google-cloud-certificate-manager": "1.4.4", + "packages/google-cloud-channel": "1.16.0", "packages/google-cloud-commerce-consumer-procurement": "0.0.0", - "packages/google-cloud-confidentialcomputing": "0.2.0", - "packages/google-cloud-contact-center-insights": "1.12.0", - "packages/google-cloud-contentwarehouse": "0.5.1", - "packages/google-cloud-data-fusion": "1.8.2", - "packages/google-cloud-data-qna": "0.10.3", - "packages/google-cloud-datacatalog": "3.14.0", - 
"packages/google-cloud-datacatalog-lineage": "0.2.3", - "packages/google-cloud-dataform": "0.5.2", - "packages/google-cloud-datalabeling": "1.8.3", - "packages/google-cloud-dataplex": "1.6.1", - "packages/google-cloud-dataproc-metastore": "1.12.0", - "packages/google-cloud-datastream": "1.7.0", - "packages/google-cloud-deploy": "1.12.0", - "packages/google-cloud-discoveryengine": "0.10.0", - "packages/google-cloud-dms": "1.7.1", - "packages/google-cloud-documentai": "2.18.0", - "packages/google-cloud-domains": "1.5.2", - "packages/google-cloud-edgecontainer": "0.5.2", - "packages/google-cloud-enterpriseknowledgegraph": "0.3.3", - "packages/google-cloud-essential-contacts": "1.5.3", - "packages/google-cloud-eventarc": "1.9.2", - "packages/google-cloud-eventarc-publishing": "0.6.3", - "packages/google-cloud-functions": "1.13.2", - "packages/google-cloud-gke-connect-gateway": "0.8.3", - "packages/google-cloud-gke-multicloud": "0.6.2", - "packages/google-cloud-iam-logging": "1.2.1", - "packages/google-cloud-iap": "1.10.3", - "packages/google-cloud-ids": "1.5.2", - "packages/google-cloud-kms-inventory": "0.2.0", - "packages/google-cloud-language": "2.10.1", - "packages/google-cloud-life-sciences": "0.9.3", - "packages/google-cloud-managed-identities": "1.7.2", - "packages/google-cloud-media-translation": "0.11.3", - "packages/google-cloud-memcache": "1.7.2", - "packages/google-cloud-migrationcenter": "0.1.1", - "packages/google-cloud-monitoring-metrics-scopes": "1.4.3", - "packages/google-cloud-netapp": "0.2.0", - "packages/google-cloud-network-connectivity": "2.0.2", - "packages/google-cloud-network-management": "1.9.1", - "packages/google-cloud-network-security": "0.9.3", - "packages/google-cloud-notebooks": "1.7.1", - "packages/google-cloud-orchestration-airflow": "1.9.1", - "packages/google-cloud-phishing-protection": "1.9.1", - "packages/google-cloud-policy-troubleshooter": "1.9.0", - "packages/google-cloud-policysimulator": "0.1.0", - 
"packages/google-cloud-private-ca": "1.8.1", - "packages/google-cloud-private-catalog": "0.9.3", - "packages/google-cloud-public-ca": "0.3.3", - "packages/google-cloud-rapidmigrationassessment": "0.1.1", - "packages/google-cloud-recaptcha-enterprise": "1.12.1", - "packages/google-cloud-recommendations-ai": "0.10.4", - "packages/google-cloud-recommender": "2.11.2", - "packages/google-cloud-redis": "2.13.1", - "packages/google-cloud-resource-manager": "1.10.3", - "packages/google-cloud-resource-settings": "1.7.1", - "packages/google-cloud-secret-manager": "2.16.3", - "packages/google-cloud-securitycenter": "1.23.2", - "packages/google-cloud-service-control": "1.9.1", - "packages/google-cloud-service-directory": "1.9.1", - "packages/google-cloud-service-usage": "1.7.2", - "packages/google-cloud-shell": "1.7.2", - "packages/google-cloud-source-context": "1.4.2", - "packages/google-cloud-storageinsights": "0.1.1", - "packages/google-cloud-support": "0.1.1", - "packages/google-cloud-talent": "2.11.1", - "packages/google-cloud-tpu": "1.13.0", - "packages/google-cloud-vm-migration": "1.6.2", - "packages/google-cloud-vmwareengine": "1.1.1", - "packages/google-cloud-vpc-access": "1.8.2", - "packages/google-cloud-webrisk": "1.12.1", - "packages/google-cloud-websecurityscanner": "1.12.2", - "packages/google-cloud-workflows": "1.10.2", - "packages/google-cloud-workstations": "0.4.1", - "packages/google-geo-type": "0.3.2", - "packages/google-maps-addressvalidation": "0.3.5", - "packages/google-maps-mapsplatformdatasets": "0.3.1", - "packages/google-maps-places": "0.1.2", - "packages/google-maps-routing": "0.6.1" + "packages/google-cloud-confidentialcomputing": "0.4.1", + "packages/google-cloud-config": "0.1.1", + "packages/google-cloud-contact-center-insights": "1.12.1", + "packages/google-cloud-contentwarehouse": "0.7.1", + "packages/google-cloud-data-fusion": "1.8.3", + "packages/google-cloud-data-qna": "0.10.4", + "packages/google-cloud-datacatalog": "3.15.2", + 
"packages/google-cloud-datacatalog-lineage": "0.2.4", + "packages/google-cloud-dataform": "0.5.3", + "packages/google-cloud-datalabeling": "1.8.4", + "packages/google-cloud-dataplex": "1.6.3", + "packages/google-cloud-dataproc-metastore": "1.12.1", + "packages/google-cloud-datastream": "1.7.1", + "packages/google-cloud-deploy": "1.12.1", + "packages/google-cloud-dialogflow-cx": "1.27.1", + "packages/google-cloud-discoveryengine": "0.11.1", + "packages/google-cloud-dms": "1.7.2", + "packages/google-cloud-documentai": "2.18.1", + "packages/google-cloud-domains": "1.5.3", + "packages/google-cloud-edgecontainer": "0.5.3", + "packages/google-cloud-enterpriseknowledgegraph": "0.3.4", + "packages/google-cloud-essential-contacts": "1.5.4", + "packages/google-cloud-eventarc": "1.9.3", + "packages/google-cloud-eventarc-publishing": "0.6.4", + "packages/google-cloud-functions": "1.13.3", + "packages/google-cloud-gke-backup": "0.5.2", + "packages/google-cloud-gke-connect-gateway": "0.8.4", + "packages/google-cloud-gke-multicloud": "0.6.3", + "packages/google-cloud-iam": "2.12.2", + "packages/google-cloud-iam-logging": "1.2.2", + "packages/google-cloud-iap": "1.11.0", + "packages/google-cloud-ids": "1.5.3", + "packages/google-cloud-kms-inventory": "0.2.1", + "packages/google-cloud-language": "2.11.1", + "packages/google-cloud-life-sciences": "0.9.4", + "packages/google-cloud-managed-identities": "1.7.3", + "packages/google-cloud-media-translation": "0.11.4", + "packages/google-cloud-memcache": "1.7.3", + "packages/google-cloud-migrationcenter": "0.1.2", + "packages/google-cloud-monitoring-metrics-scopes": "1.4.4", + "packages/google-cloud-netapp": "0.3.1", + "packages/google-cloud-network-connectivity": "2.1.0", + "packages/google-cloud-network-management": "1.11.1", + "packages/google-cloud-network-security": "0.9.4", + "packages/google-cloud-network-services": "0.5.4", + "packages/google-cloud-notebooks": "1.8.1", + "packages/google-cloud-optimization": "1.5.0", + 
"packages/google-cloud-orchestration-airflow": "1.9.2", + "packages/google-cloud-phishing-protection": "1.9.2", + "packages/google-cloud-policy-troubleshooter": "1.9.1", + "packages/google-cloud-policysimulator": "0.1.1", + "packages/google-cloud-private-ca": "1.8.2", + "packages/google-cloud-private-catalog": "0.9.4", + "packages/google-cloud-public-ca": "0.3.4", + "packages/google-cloud-rapidmigrationassessment": "0.1.2", + "packages/google-cloud-recaptcha-enterprise": "1.12.2", + "packages/google-cloud-recommendations-ai": "0.10.5", + "packages/google-cloud-recommender": "2.12.1", + "packages/google-cloud-redis": "2.13.2", + "packages/google-cloud-resource-manager": "1.10.4", + "packages/google-cloud-resource-settings": "1.7.2", + "packages/google-cloud-secret-manager": "2.16.4", + "packages/google-cloud-securitycenter": "1.23.3", + "packages/google-cloud-service-control": "1.9.2", + "packages/google-cloud-service-directory": "1.9.2", + "packages/google-cloud-service-usage": "1.8.0", + "packages/google-cloud-shell": "1.7.3", + "packages/google-cloud-source-context": "1.4.3", + "packages/google-cloud-storageinsights": "0.1.3", + "packages/google-cloud-support": "0.1.2", + "packages/google-cloud-talent": "2.11.2", + "packages/google-cloud-tpu": "1.15.1", + "packages/google-cloud-vm-migration": "1.6.3", + "packages/google-cloud-vmwareengine": "1.1.2", + "packages/google-cloud-vpc-access": "1.8.3", + "packages/google-cloud-webrisk": "1.12.2", + "packages/google-cloud-websecurityscanner": "1.12.3", + "packages/google-cloud-workflows": "1.12.1", + "packages/google-cloud-workstations": "0.5.1", + "packages/google-geo-type": "0.3.3", + "packages/google-maps-addressvalidation": "0.3.6", + "packages/google-maps-mapsplatformdatasets": "0.3.2", + "packages/google-maps-places": "0.1.3", + "packages/google-maps-routing": "0.6.2" } diff --git a/CHANGELOG.md b/CHANGELOG.md index bf81c9e28725..99f743354310 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,102 +6,111 @@ 
Changelogs - [google-apps-script-type==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) - [google-area120-tables==0.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) - [google-cloud-access-approval==1.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) -- [google-cloud-advisorynotifications==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) +- [google-cloud-advisorynotifications==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) - [google-cloud-alloydb==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) - [google-cloud-api-gateway==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) - [google-cloud-api-keys==0.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) - [google-cloud-apigee-connect==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) - [google-cloud-apigee-registry==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) - [google-cloud-appengine-logging==1.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) -- [google-cloud-artifact-registry==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-bare-metal-solution==1.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- 
[google-cloud-batch==0.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-artifact-registry==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) +- [google-cloud-assured-workloads==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) +- [google-cloud-bare-metal-solution==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) +- [google-cloud-batch==0.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) - [google-cloud-beyondcorp-clientconnectorservices==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) - [google-cloud-beyondcorp-clientgateways==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) -- [google-cloud-bigquery-analyticshub==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) +- [google-cloud-bigquery-analyticshub==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) - 
[google-cloud-bigquery-biglake==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) -- [google-cloud-bigquery-data-exchange==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) -- [google-cloud-bigquery-datapolicies==0.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) +- [google-cloud-bigquery-data-exchange==0.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) +- [google-cloud-bigquery-datapolicies==0.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) - [google-cloud-bigquery-logging==1.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) -- [google-cloud-billing-budgets==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) -- [google-cloud-billing==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) +- [google-cloud-bigquery-migration==0.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) +- [google-cloud-billing-budgets==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) +- [google-cloud-billing==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) - [google-cloud-certificate-manager==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) - 
[google-cloud-channel==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-confidentialcomputing==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) +- [google-cloud-confidentialcomputing==0.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) +- [google-cloud-config==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) - [google-cloud-contact-center-insights==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-contentwarehouse==0.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) +- [google-cloud-contentwarehouse==0.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) - [google-cloud-data-fusion==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) - [google-cloud-data-qna==0.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) - [google-cloud-datacatalog-lineage==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) -- [google-cloud-datacatalog==3.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) +- [google-cloud-datacatalog==3.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) - [google-cloud-dataform==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) - 
[google-cloud-datalabeling==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) -- [google-cloud-dataplex==1.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) +- [google-cloud-dataplex==1.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) - [google-cloud-datastream==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- [google-cloud-deploy==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) -- [google-cloud-discoveryengine==0.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-deploy==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-dialogflow-cx==1.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) +- [google-cloud-discoveryengine==0.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) - [google-cloud-dms==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-documentai==2.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - 
[google-cloud-domains==1.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) - [google-cloud-edgecontainer==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) - [google-cloud-enterpriseknowledgegraph==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) - [google-cloud-essential-contacts==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) - [google-cloud-eventarc-publishing==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) - [google-cloud-eventarc==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) -- [google-cloud-functions==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-functions==1.13.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-gke-backup==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) - [google-cloud-gke-connect-gateway==0.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) - [google-cloud-gke-multicloud==0.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-iam-logging==1.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) -- [google-cloud-iap==1.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) +- 
[google-cloud-iam==2.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) +- [google-cloud-iap==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) - [google-cloud-ids==1.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) - [google-cloud-kms-inventory==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-language==2.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) +- [google-cloud-language==2.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) - [google-cloud-life-sciences==0.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) - [google-cloud-managed-identities==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) - [google-cloud-media-translation==0.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) - [google-cloud-memcache==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) - [google-cloud-migrationcenter==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) - [google-cloud-monitoring-metrics-scopes==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) -- [google-cloud-netapp==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- 
[google-cloud-netapp==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) - [google-cloud-network-connectivity==2.0.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) -- [google-cloud-network-management==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) +- [google-cloud-network-management==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) - [google-cloud-network-security==0.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) -- [google-cloud-notebooks==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) +- [google-cloud-network-services==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) +- [google-cloud-notebooks==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) +- [google-cloud-optimization==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) - [google-cloud-orchestration-airflow==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) - [google-cloud-phishing-protection==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) - [google-cloud-policy-troubleshooter==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) +- [google-cloud-policysimulator==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) - 
[google-cloud-private-ca==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) - [google-cloud-private-catalog==0.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) - [google-cloud-public-ca==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - [google-cloud-rapidmigrationassessment==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) - [google-cloud-recaptcha-enterprise==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - [google-cloud-recommendations-ai==0.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) -- [google-cloud-recommender==2.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) +- [google-cloud-recommender==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) - [google-cloud-redis==2.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) -- [google-cloud-resource-manager==1.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) +- [google-cloud-resource-manager==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) - [google-cloud-resource-settings==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) -- [google-cloud-secret-manager==2.16.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) -- 
[google-cloud-securitycenter==1.23.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) +- [google-cloud-secret-manager==2.16.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) +- [google-cloud-securitycenter==1.23.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) - [google-cloud-service-control==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) -- [google-cloud-service-directory==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) +- [google-cloud-service-directory==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) - [google-cloud-service-usage==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) - [google-cloud-shell==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) - [google-cloud-source-context==1.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) -- [google-cloud-storageinsights==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) +- [google-cloud-storageinsights==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) - [google-cloud-support==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) - [google-cloud-talent==2.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) -- 
[google-cloud-tpu==1.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) +- [google-cloud-tpu==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) - [google-cloud-vm-migration==1.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) - [google-cloud-vmwareengine==1.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) - [google-cloud-vpc-access==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) - [google-cloud-webrisk==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) - [google-cloud-websecurityscanner==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) -- [google-cloud-workflows==1.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) -- [google-cloud-workstations==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) +- [google-cloud-workflows==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) +- [google-cloud-workstations==0.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) - 
[google-maps-mapsplatformdatasets==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) diff --git a/packages/google-cloud-alloydb/.flake8 b/packages/google-cloud-alloydb/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-alloydb/.flake8 +++ b/packages/google-cloud-alloydb/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index 048a254a8a78..99f81a5721b5 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.3.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.0...google-cloud-alloydb-v0.3.1) (2023-09-19) + + +### Features + +* Added ClientConnectionConfig ([c76e881](https://github.com/googleapis/google-cloud-python/commit/c76e88194ea5ae3851cdd61071bc9e8106ae1571)) +* Added DatabaseVersion ([c76e881](https://github.com/googleapis/google-cloud-python/commit/c76e88194ea5ae3851cdd61071bc9e8106ae1571)) +* Added enum value for PG15 ([c76e881](https://github.com/googleapis/google-cloud-python/commit/c76e88194ea5ae3851cdd61071bc9e8106ae1571)) +* Added QuantityBasedExpiry ([c76e881](https://github.com/googleapis/google-cloud-python/commit/c76e88194ea5ae3851cdd61071bc9e8106ae1571)) +* Changed description for recovery_window_days in ContinuousBackupConfig ([c76e881](https://github.com/googleapis/google-cloud-python/commit/c76e88194ea5ae3851cdd61071bc9e8106ae1571)) +* Deprecate network field in favor of network_config.network ([c76e881](https://github.com/googleapis/google-cloud-python/commit/c76e88194ea5ae3851cdd61071bc9e8106ae1571)) + ## 
[0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.2.1...google-cloud-alloydb-v0.3.0) (2023-07-17) diff --git a/packages/google-cloud-alloydb/CONTRIBUTING.rst b/packages/google-cloud-alloydb/CONTRIBUTING.rst index d7353cddcc6e..abc93b942c32 100644 --- a/packages/google-cloud-alloydb/CONTRIBUTING.rst +++ b/packages/google-cloud-alloydb/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-alloydb/noxfile.py ********** diff --git a/packages/google-cloud-alloydb/MANIFEST.in b/packages/google-cloud-alloydb/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-alloydb/MANIFEST.in +++ b/packages/google-cloud-alloydb/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-alloydb/README.rst b/packages/google-cloud-alloydb/README.rst index 690405d2b972..dbc6e53e435e 100644 --- a/packages/google-cloud-alloydb/README.rst +++ b/packages/google-cloud-alloydb/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. 
These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-alloydb + pip install google-cloud-alloydb Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-alloydb + py -m venv + .\\Scripts\activate + pip install google-cloud-alloydb Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-alloydb/docs/conf.py b/packages/google-cloud-alloydb/docs/conf.py index 8b330b2a3459..3424ec8d500e 100644 --- a/packages/google-cloud-alloydb/docs/conf.py +++ b/packages/google-cloud-alloydb/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index 14bfd0fce7ce..5c99b56d549d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.0" # {x-release-please-version} +__version__ = "0.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index 14bfd0fce7ce..5c99b56d549d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.0" # {x-release-please-version} +__version__ = "0.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py index 52e6810ae5cd..5ecbf9cc49ff 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3334,6 +3334,7 @@ async def sample_list_users(): Returns: google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListUsersAsyncPager: Message for response to listing Users + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py index 4912e94cd8f1..c80fe2cce1ad 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3627,6 +3627,7 @@ def sample_list_users(): Returns: google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListUsersPager: Message for response to listing Users + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py index 9268e289ae54..2a7bef1fa628 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py index 0f4cc3f298ff..349be55039f8 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py @@ -141,8 +141,7 @@ class MigrationSource(proto.Message): Attributes: host_port (str): Output only. The host and port of the - on-premises instance in host:port - format + on-premises instance in host:port format reference_id (str): Output only. Place holder for the external source identifier(e.g DMS job name) that created @@ -366,6 +365,7 @@ class AutomatedBackupPolicy(proto.Message): The location where the backup will be stored. Currently, the only supported option is to store the backup in the same region as the cluster. + If empty, defaults to the region of the cluster. labels (MutableMapping[str, str]): Labels to apply to backups created using this @@ -387,10 +387,12 @@ class WeeklySchedule(proto.Message): The times during the day to start a backup. 
The start times are assumed to be in UTC and to be an exact hour (e.g., 04:00:00). + If no start times are provided, a single fixed start time is chosen arbitrarily. days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): The days of the week to perform a backup. + If this field is left empty, the default of every day of the week is used. """ @@ -492,10 +494,11 @@ class ContinuousBackupConfig(proto.Message): This field is a member of `oneof`_ ``_enabled``. recovery_window_days (int): - The number of days backups and logs will be - retained, which determines the window of time - that data is recoverable for. If not set, it - defaults to 14 days. + The number of days that are eligible to + restore from using PITR. To support the entire + recovery window, backups and logs are retained + for one day more than the recovery window. If + not set, defaults to 14 days. encryption_config (google.cloud.alloydb_v1.types.EncryptionConfig): The encryption config can be specified to encrypt the backups with a customer-managed @@ -673,18 +676,21 @@ class Cluster(proto.Message): which RPC was used to create the cluster (i.e. ``CreateCluster`` vs. ``CreateSecondaryCluster`` database_version (google.cloud.alloydb_v1.types.DatabaseVersion): - Output only. The database engine major - version. This is an output-only field and it's - populated at the Cluster creation time. This - field cannot be changed after cluster creation. + Optional. The database engine major version. + This is an optional field and it is populated at + the Cluster creation time. If a database version + is not supplied at cluster creation time, then a + default database version will be used. + network_config (google.cloud.alloydb_v1.types.Cluster.NetworkConfig): + network (str): Required. The resource link for the VPC network in which cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. 
It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". - This is required to create a cluster. It can be updated, but - it cannot be removed. + "projects/{project}/global/networks/{network_id}". This is + required to create a cluster. Deprecated, use + network_config.network instead. etag (str): For Resource freshness validation (https://google.aip.dev/154) @@ -694,18 +700,19 @@ class Cluster(proto.Message): from labels. https://google.aip.dev/128 reconciling (bool): Output only. Reconciling - (https://google.aip.dev/128#reconciliation). - Set to true if the current state of Cluster does - not match the user's intended state, and the - service is actively updating the resource to - reconcile them. This can happen due to - user-triggered updates or system actions like - failover or maintenance. + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Cluster does not + match the user's intended state, and the service + is actively updating the resource to reconcile + them. This can happen due to user-triggered + updates or system actions like failover or + maintenance. initial_user (google.cloud.alloydb_v1.types.UserPassword): Input only. Initial user to setup during cluster creation. Required. If used in ``RestoreCluster`` this is ignored. automated_backup_policy (google.cloud.alloydb_v1.types.AutomatedBackupPolicy): The automated backup policy for this cluster. + If no policy is provided then the default policy will be used. If backups are supported for the cluster, the default policy takes one backup a @@ -807,6 +814,37 @@ class ClusterType(proto.Enum): PRIMARY = 1 SECONDARY = 2 + class NetworkConfig(proto.Message): + r"""Metadata related to network configuration. + + Attributes: + network (str): + Required. The resource link for the VPC network in which + cluster resources are created and from which they are + accessible via Private IP. The network must belong to the + same project as the cluster. 
It is specified in the form: + "projects/{project_number}/global/networks/{network_id}". + This is required to create a cluster. + allocated_ip_range (str): + Optional. Name of the allocated IP range for the private IP + AlloyDB cluster, for example: + "google-managed-services-default". If set, the instance IPs + for this cluster will be created in the allocated range. The + range name must comply with RFC 1035. Specifically, the name + must be 1-63 characters long and match the regular + expression `a-z <[-a-z0-9]*[a-z0-9]>`__?. Field name is + intended to be consistent with CloudSQL. + """ + + network: str = proto.Field( + proto.STRING, + number=1, + ) + allocated_ip_range: str = proto.Field( + proto.STRING, + number=2, + ) + class SecondaryConfig(proto.Message): r"""Configuration information for the secondary cluster. This should be set if and only if the cluster is of type SECONDARY. @@ -898,6 +936,11 @@ class PrimaryConfig(proto.Message): number=9, enum="DatabaseVersion", ) + network_config: NetworkConfig = proto.Field( + proto.MESSAGE, + number=29, + message=NetworkConfig, + ) network: str = proto.Field( proto.STRING, number=10, @@ -1056,10 +1099,10 @@ class Instance(proto.Message): application. reconciling (bool): Output only. Reconciling - (https://google.aip.dev/128#reconciliation). - Set to true if the current state of Instance - does not match the user's intended state, and - the service is actively updating the resource to + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Instance does + not match the user's intended state, and the + service is actively updating the resource to reconcile them. This can happen due to user-triggered updates or system actions like failover or maintenance. @@ -1070,6 +1113,9 @@ class Instance(proto.Message): Annotations to allow client tools to store small amount of arbitrary data. This is distinct from labels. 
https://google.aip.dev/128 + client_connection_config (google.cloud.alloydb_v1.types.Instance.ClientConnectionConfig): + Optional. Client connection specific + configurations """ class State(proto.Enum): @@ -1279,6 +1325,29 @@ class ReadPoolConfig(proto.Message): number=1, ) + class ClientConnectionConfig(proto.Message): + r"""Client connection configuration + + Attributes: + require_connectors (bool): + Optional. Configuration to enforce connectors + only (ex: AuthProxy) connections to the + database. + ssl_config (google.cloud.alloydb_v1.types.SslConfig): + Optional. SSL config option for this + instance. + """ + + require_connectors: bool = proto.Field( + proto.BOOL, + number=1, + ) + ssl_config: "SslConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="SslConfig", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1377,6 +1446,11 @@ class ReadPoolConfig(proto.Message): proto.STRING, number=18, ) + client_connection_config: ClientConnectionConfig = proto.Field( + proto.MESSAGE, + number=23, + message=ClientConnectionConfig, + ) class Backup(proto.Message): @@ -1457,6 +1531,17 @@ class Backup(proto.Message): to be garbage collected. It is the duration specified by the backup's retention policy, added to the backup's create_time. + expiry_quantity (google.cloud.alloydb_v1.types.Backup.QuantityBasedExpiry): + Output only. The QuantityBasedExpiry of the + backup, specified by the backup's retention + policy. Once the expiry quantity is over + retention, the backup is eligible to be garbage + collected. + database_version (google.cloud.alloydb_v1.types.DatabaseVersion): + Output only. The database engine major + version of the cluster this backup was created + from. Any restored cluster created from this + backup will have the same database version. 
""" class State(proto.Enum): @@ -1503,6 +1588,41 @@ class Type(proto.Enum): AUTOMATED = 2 CONTINUOUS = 3 + class QuantityBasedExpiry(proto.Message): + r"""A backup's position in a quantity-based retention queue, of backups + with the same source cluster and type, with length, retention, + specified by the backup's retention policy. Once the position is + greater than the retention, the backup is eligible to be garbage + collected. + + Example: 5 backups from the same source cluster and type with a + quantity-based retention of 3 and denoted by backup_id (position, + retention). + + Safe: backup_5 (1, 3), backup_4, (2, 3), backup_3 (3, 3). Awaiting + garbage collection: backup_2 (4, 3), backup_1 (5, 3) + + Attributes: + retention_count (int): + Output only. The backup's position among its + backups with the same source cluster and type, + by descending chronological order create + time(i.e. newest first). + total_retention_count (int): + Output only. The length of the quantity-based + queue, specified by the backup's retention + policy. 
+ """ + + retention_count: int = proto.Field( + proto.INT32, + number=1, + ) + total_retention_count: int = proto.Field( + proto.INT32, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1589,6 +1709,16 @@ class Type(proto.Enum): number=19, message=timestamp_pb2.Timestamp, ) + expiry_quantity: QuantityBasedExpiry = proto.Field( + proto.MESSAGE, + number=20, + message=QuantityBasedExpiry, + ) + database_version: "DatabaseVersion" = proto.Field( + proto.ENUM, + number=22, + enum="DatabaseVersion", + ) class SupportedDatabaseFlag(proto.Message): diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py index 55789cce81f8..f9cc801d3da3 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py @@ -194,6 +194,7 @@ class CreateSecondaryClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -202,6 +203,7 @@ class CreateSecondaryClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -254,6 +256,7 @@ class CreateClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -262,6 +265,7 @@ class CreateClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -315,6 +319,7 @@ class UpdateClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -323,6 +328,7 @@ class UpdateClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -376,6 +382,7 @@ class DeleteClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -384,6 +391,7 @@ class DeleteClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -439,6 +447,7 @@ class PromoteClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -447,6 +456,7 @@ class PromoteClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -517,6 +527,7 @@ class RestoreClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -525,6 +536,7 @@ class RestoreClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -692,6 +704,7 @@ class CreateInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -700,6 +713,7 @@ class CreateInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -752,6 +766,7 @@ class CreateSecondaryInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -760,6 +775,7 @@ class CreateSecondaryInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -827,6 +843,7 @@ class BatchCreateInstancesRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -835,6 +852,7 @@ class BatchCreateInstancesRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1024,6 +1042,7 @@ class UpdateInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1032,6 +1051,7 @@ class UpdateInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1085,6 +1105,7 @@ class DeleteInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1093,6 +1114,7 @@ class DeleteInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1141,6 +1163,7 @@ class FailoverInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1149,6 +1172,7 @@ class FailoverInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1191,6 +1215,7 @@ class InjectFaultRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1199,6 +1224,7 @@ class InjectFaultRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1256,6 +1282,7 @@ class RestartInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1264,6 +1291,7 @@ class RestartInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1392,6 +1420,7 @@ class CreateBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1400,6 +1429,7 @@ class CreateBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1451,6 +1481,7 @@ class UpdateBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1459,6 +1490,7 @@ class UpdateBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1510,6 +1542,7 @@ class DeleteBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1518,6 +1551,7 @@ class DeleteBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1797,6 +1831,7 @@ class CreateUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1805,6 +1840,7 @@ class CreateUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1856,6 +1892,7 @@ class UpdateUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1864,6 +1901,7 @@ class UpdateUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1914,6 +1952,7 @@ class DeleteUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1922,6 +1961,7 @@ class DeleteUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index 14bfd0fce7ce..5c99b56d549d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.0" # {x-release-please-version} +__version__ = "0.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py index 1280ba04266a..48da4f7865b9 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3445,7 +3445,6 @@ async def sample_get_connection_info(): parent (:class:`str`): Required. 
The name of the parent resource. The required format is: - projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} This corresponds to the ``parent`` field @@ -3571,6 +3570,7 @@ async def sample_list_users(): Returns: google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListUsersAsyncPager: Message for response to listing Users + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index b27c51e8ece7..7f7a61cfd125 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3751,7 +3751,6 @@ def sample_get_connection_info(): parent (str): Required. The name of the parent resource. The required format is: - projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} This corresponds to the ``parent`` field @@ -3868,6 +3867,7 @@ def sample_list_users(): Returns: google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListUsersPager: Message for response to listing Users + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py index c950042d0c52..600448da403b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index c04f999569f5..bc210f3a2a00 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -142,8 +142,7 @@ class MigrationSource(proto.Message): Attributes: host_port (str): Output only. The host and port of the - on-premises instance in host:port - format + on-premises instance in host:port format reference_id (str): Output only. Place holder for the external source identifier(e.g DMS job name) that created @@ -367,6 +366,7 @@ class AutomatedBackupPolicy(proto.Message): The location where the backup will be stored. Currently, the only supported option is to store the backup in the same region as the cluster. + If empty, defaults to the region of the cluster. labels (MutableMapping[str, str]): Labels to apply to backups created using this @@ -388,10 +388,12 @@ class WeeklySchedule(proto.Message): The times during the day to start a backup. 
The start times are assumed to be in UTC and to be an exact hour (e.g., 04:00:00). + If no start times are provided, a single fixed start time is chosen arbitrarily. days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): The days of the week to perform a backup. + If this field is left empty, the default of every day of the week is used. """ @@ -699,18 +701,19 @@ class Cluster(proto.Message): from labels. https://google.aip.dev/128 reconciling (bool): Output only. Reconciling - (https://google.aip.dev/128#reconciliation). - Set to true if the current state of Cluster does - not match the user's intended state, and the - service is actively updating the resource to - reconcile them. This can happen due to - user-triggered updates or system actions like - failover or maintenance. + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Cluster does not + match the user's intended state, and the service + is actively updating the resource to reconcile + them. This can happen due to user-triggered + updates or system actions like failover or + maintenance. initial_user (google.cloud.alloydb_v1alpha.types.UserPassword): Input only. Initial user to setup during cluster creation. Required. If used in ``RestoreCluster`` this is ignored. automated_backup_policy (google.cloud.alloydb_v1alpha.types.AutomatedBackupPolicy): The automated backup policy for this cluster. + If no policy is provided then the default policy will be used. If backups are supported for the cluster, the default policy takes one backup a @@ -1104,10 +1107,10 @@ class Instance(proto.Message): application. reconciling (bool): Output only. Reconciling - (https://google.aip.dev/128#reconciliation). - Set to true if the current state of Instance - does not match the user's intended state, and - the service is actively updating the resource to + (https://google.aip.dev/128#reconciliation). 
Set + to true if the current state of Instance does + not match the user's intended state, and the + service is actively updating the resource to reconcile them. This can happen due to user-triggered updates or system actions like failover or maintenance. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py index 249dae8b65d9..9f9aa79a6e48 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py @@ -198,6 +198,7 @@ class CreateSecondaryClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -206,6 +207,7 @@ class CreateSecondaryClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -258,6 +260,7 @@ class CreateClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -266,6 +269,7 @@ class CreateClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -319,6 +323,7 @@ class UpdateClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -327,6 +332,7 @@ class UpdateClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -380,6 +386,7 @@ class DeleteClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -388,6 +395,7 @@ class DeleteClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -443,6 +451,7 @@ class PromoteClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -451,6 +460,7 @@ class PromoteClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -521,6 +531,7 @@ class RestoreClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -529,6 +540,7 @@ class RestoreClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -696,6 +708,7 @@ class CreateInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -704,6 +717,7 @@ class CreateInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -756,6 +770,7 @@ class CreateSecondaryInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -764,6 +779,7 @@ class CreateSecondaryInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -831,6 +847,7 @@ class BatchCreateInstancesRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -839,6 +856,7 @@ class BatchCreateInstancesRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1028,6 +1046,7 @@ class UpdateInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1036,6 +1055,7 @@ class UpdateInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1089,6 +1109,7 @@ class DeleteInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1097,6 +1118,7 @@ class DeleteInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -1145,6 +1167,7 @@ class FailoverInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1153,6 +1176,7 @@ class FailoverInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1195,6 +1219,7 @@ class InjectFaultRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1203,6 +1228,7 @@ class InjectFaultRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1260,6 +1286,7 @@ class RestartInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1268,6 +1295,7 @@ class RestartInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -1396,6 +1424,7 @@ class CreateBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1404,6 +1433,7 @@ class CreateBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1455,6 +1485,7 @@ class UpdateBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1463,6 +1494,7 @@ class UpdateBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1514,6 +1546,7 @@ class DeleteBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1522,6 +1555,7 @@ class DeleteBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -1636,6 +1670,7 @@ class GenerateClientCertificateRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1644,6 +1679,7 @@ class GenerateClientCertificateRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1732,7 +1768,6 @@ class GetConnectionInfoRequest(proto.Message): parent (str): Required. The name of the parent resource. The required format is: - projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} request_id (str): Optional. An optional request ID to identify @@ -1741,6 +1776,7 @@ class GetConnectionInfoRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1749,6 +1785,7 @@ class GetConnectionInfoRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1946,6 +1983,7 @@ class CreateUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1954,6 +1992,7 @@ class CreateUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -2005,6 +2044,7 @@ class UpdateUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -2013,6 +2053,7 @@ class UpdateUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -2063,6 +2104,7 @@ class DeleteUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -2071,6 +2113,7 @@ class DeleteUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index 14bfd0fce7ce..5c99b56d549d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.0" # {x-release-please-version} +__version__ = "0.3.1" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py index 80a8462064d4..4700ea8eb534 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3291,11 +3291,12 @@ async def generate_client_certificate( metadata: Sequence[Tuple[str, str]] = (), ) -> service.GenerateClientCertificateResponse: r"""Generate a client certificate signed by a Cluster CA. - The sole purpose of this endpoint is to support the Auth - Proxy client and the endpoint's behavior is subject to - change without notice, so do not rely on its behavior - remaining constant. Future changes will not break the - Auth Proxy client. 
+ The sole purpose of this endpoint is to support AlloyDB + connectors and the Auth Proxy client. The endpoint's + behavior is subject to change without notice, so do not + rely on its behavior remaining constant. Future changes + will not break AlloyDB connectors or the Auth Proxy + client. .. code-block:: python @@ -3444,7 +3445,6 @@ async def sample_get_connection_info(): parent (:class:`str`): Required. The name of the parent resource. The required format is: - projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} This corresponds to the ``parent`` field @@ -3570,6 +3570,7 @@ async def sample_list_users(): Returns: google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListUsersAsyncPager: Message for response to listing Users + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py index ddafb9eb4dc4..70c2820227c3 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3604,11 +3604,12 @@ def generate_client_certificate( metadata: Sequence[Tuple[str, str]] = (), ) -> service.GenerateClientCertificateResponse: r"""Generate a client certificate signed by a Cluster CA. 
- The sole purpose of this endpoint is to support the Auth - Proxy client and the endpoint's behavior is subject to - change without notice, so do not rely on its behavior - remaining constant. Future changes will not break the - Auth Proxy client. + The sole purpose of this endpoint is to support AlloyDB + connectors and the Auth Proxy client. The endpoint's + behavior is subject to change without notice, so do not + rely on its behavior remaining constant. Future changes + will not break AlloyDB connectors or the Auth Proxy + client. .. code-block:: python @@ -3750,7 +3751,6 @@ def sample_get_connection_info(): parent (str): Required. The name of the parent resource. The required format is: - projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} This corresponds to the ``parent`` field @@ -3867,6 +3867,7 @@ def sample_list_users(): Returns: google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListUsersPager: Message for response to listing Users + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py index 208c31eccd7d..086f0353abac 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py @@ -910,11 +910,12 @@ def generate_client_certificate( r"""Return a callable for the generate client certificate method over gRPC. Generate a client certificate signed by a Cluster CA. - The sole purpose of this endpoint is to support the Auth - Proxy client and the endpoint's behavior is subject to - change without notice, so do not rely on its behavior - remaining constant. Future changes will not break the - Auth Proxy client. 
+ The sole purpose of this endpoint is to support AlloyDB + connectors and the Auth Proxy client. The endpoint's + behavior is subject to change without notice, so do not + rely on its behavior remaining constant. Future changes + will not break AlloyDB connectors or the Auth Proxy + client. Returns: Callable[[~.GenerateClientCertificateRequest], diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py index 53e5f599d210..f02f4fd2e5b4 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py @@ -933,11 +933,12 @@ def generate_client_certificate( r"""Return a callable for the generate client certificate method over gRPC. Generate a client certificate signed by a Cluster CA. - The sole purpose of this endpoint is to support the Auth - Proxy client and the endpoint's behavior is subject to - change without notice, so do not rely on its behavior - remaining constant. Future changes will not break the - Auth Proxy client. + The sole purpose of this endpoint is to support AlloyDB + connectors and the Auth Proxy client. The endpoint's + behavior is subject to change without notice, so do not + rely on its behavior remaining constant. Future changes + will not break AlloyDB connectors or the Auth Proxy + client. 
Returns: Callable[[~.GenerateClientCertificateRequest], diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py index 54206cf96abe..918a6ba2d487 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index 0911559775a8..5409a6415ad5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -108,10 +108,13 @@ class DatabaseVersion(proto.Enum): 13. POSTGRES_14 (2): The database version is Postgres 14. + POSTGRES_15 (3): + The database version is Postgres 15. """ DATABASE_VERSION_UNSPECIFIED = 0 POSTGRES_13 = 1 POSTGRES_14 = 2 + POSTGRES_15 = 3 class UserPassword(proto.Message): @@ -142,8 +145,7 @@ class MigrationSource(proto.Message): Attributes: host_port (str): Output only. The host and port of the - on-premises instance in host:port - format + on-premises instance in host:port format reference_id (str): Output only. Place holder for the external source identifier(e.g DMS job name) that created @@ -367,6 +369,7 @@ class AutomatedBackupPolicy(proto.Message): The location where the backup will be stored. 
Currently, the only supported option is to store the backup in the same region as the cluster. + If empty, defaults to the region of the cluster. labels (MutableMapping[str, str]): Labels to apply to backups created using this @@ -388,10 +391,12 @@ class WeeklySchedule(proto.Message): The times during the day to start a backup. The start times are assumed to be in UTC and to be an exact hour (e.g., 04:00:00). + If no start times are provided, a single fixed start time is chosen arbitrarily. days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): The days of the week to perform a backup. + If this field is left empty, the default of every day of the week is used. """ @@ -493,10 +498,11 @@ class ContinuousBackupConfig(proto.Message): This field is a member of `oneof`_ ``_enabled``. recovery_window_days (int): - The number of days backups and logs will be - retained, which determines the window of time - that data is recoverable for. If not set, it - defaults to 14 days. + The number of days that are eligible to + restore from using PITR. To support the entire + recovery window, backups and logs are retained + for one day more than the recovery window. If + not set, defaults to 14 days. encryption_config (google.cloud.alloydb_v1beta.types.EncryptionConfig): The encryption config can be specified to encrypt the backups with a customer-managed @@ -674,10 +680,11 @@ class Cluster(proto.Message): which RPC was used to create the cluster (i.e. ``CreateCluster`` vs. ``CreateSecondaryCluster`` database_version (google.cloud.alloydb_v1beta.types.DatabaseVersion): - Output only. The database engine major - version. This is an output-only field and it's - populated at the Cluster creation time. This - field cannot be changed after cluster creation. + Optional. The database engine major version. + This is an optional field and it is populated at + the Cluster creation time. 
If a database version + is not supplied at cluster creation time, then a + default database version will be used. network_config (google.cloud.alloydb_v1beta.types.Cluster.NetworkConfig): network (str): @@ -685,9 +692,9 @@ class Cluster(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". - This is required to create a cluster. It can be updated, but - it cannot be removed. + "projects/{project}/global/networks/{network_id}". This is + required to create a cluster. Deprecated, use + network_config.network instead. etag (str): For Resource freshness validation (https://google.aip.dev/154) @@ -697,18 +704,19 @@ class Cluster(proto.Message): from labels. https://google.aip.dev/128 reconciling (bool): Output only. Reconciling - (https://google.aip.dev/128#reconciliation). - Set to true if the current state of Cluster does - not match the user's intended state, and the - service is actively updating the resource to - reconcile them. This can happen due to - user-triggered updates or system actions like - failover or maintenance. + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Cluster does not + match the user's intended state, and the service + is actively updating the resource to reconcile + them. This can happen due to user-triggered + updates or system actions like failover or + maintenance. initial_user (google.cloud.alloydb_v1beta.types.UserPassword): Input only. Initial user to setup during cluster creation. Required. If used in ``RestoreCluster`` this is ignored. automated_backup_policy (google.cloud.alloydb_v1beta.types.AutomatedBackupPolicy): The automated backup policy for this cluster. + If no policy is provided then the default policy will be used. 
If backups are supported for the cluster, the default policy takes one backup a @@ -820,11 +828,10 @@ class NetworkConfig(proto.Message): accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: "projects/{project_number}/global/networks/{network_id}". - This is required to create a cluster. It can be updated, but - it cannot be removed. + This is required to create a cluster. allocated_ip_range (str): - Optional. The name of the allocated IP range for the private - IP AlloyDB cluster. For example: + Optional. Name of the allocated IP range for the private IP + AlloyDB cluster, for example: "google-managed-services-default". If set, the instance IPs for this cluster will be created in the allocated range. The range name must comply with RFC 1035. Specifically, the name @@ -1096,10 +1103,10 @@ class Instance(proto.Message): application. reconciling (bool): Output only. Reconciling - (https://google.aip.dev/128#reconciliation). - Set to true if the current state of Instance - does not match the user's intended state, and - the service is actively updating the resource to + (https://google.aip.dev/128#reconciliation). Set + to true if the current state of Instance does + not match the user's intended state, and the + service is actively updating the resource to reconcile them. This can happen due to user-triggered updates or system actions like failover or maintenance. @@ -1117,6 +1124,9 @@ class Instance(proto.Message): non-default update policy, you must specify explicitly specify the value in each update request. + client_connection_config (google.cloud.alloydb_v1beta.types.Instance.ClientConnectionConfig): + Optional. 
Client connection specific + configurations """ class State(proto.Enum): @@ -1356,6 +1366,29 @@ class Mode(proto.Enum): enum="Instance.UpdatePolicy.Mode", ) + class ClientConnectionConfig(proto.Message): + r"""Client connection configuration + + Attributes: + require_connectors (bool): + Optional. Configuration to enforce connectors + only (ex: AuthProxy) connections to the + database. + ssl_config (google.cloud.alloydb_v1beta.types.SslConfig): + Optional. SSL config option for this + instance. + """ + + require_connectors: bool = proto.Field( + proto.BOOL, + number=1, + ) + ssl_config: "SslConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="SslConfig", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1459,6 +1492,11 @@ class Mode(proto.Enum): number=22, message=UpdatePolicy, ) + client_connection_config: ClientConnectionConfig = proto.Field( + proto.MESSAGE, + number=23, + message=ClientConnectionConfig, + ) class ConnectionInfo(proto.Message): @@ -1471,9 +1509,10 @@ class ConnectionInfo(proto.Message): projects/{project}/locations/{location}/clusters/\ */instances/*/connectionInfo This field currently has no semantic meaning. ip_address (str): - Output only. The IP address for the Instance. - This is the connection endpoint for an end-user - application. + Output only. The private network IP address for the + Instance. This is the default IP for the instance and is + always created (even if enable_public_ip is set). This is + the connection endpoint for an end-user application. pem_certificate_chain (MutableSequence[str]): Output only. The pem-encoded chain that may be used to verify the X.509 certificate. @@ -1579,6 +1618,17 @@ class Backup(proto.Message): to be garbage collected. It is the duration specified by the backup's retention policy, added to the backup's create_time. + expiry_quantity (google.cloud.alloydb_v1beta.types.Backup.QuantityBasedExpiry): + Output only. 
The QuantityBasedExpiry of the + backup, specified by the backup's retention + policy. Once the expiry quantity is over + retention, the backup is eligible to be garbage + collected. + database_version (google.cloud.alloydb_v1beta.types.DatabaseVersion): + Output only. The database engine major + version of the cluster this backup was created + from. Any restored cluster created from this + backup will have the same database version. """ class State(proto.Enum): @@ -1625,6 +1675,41 @@ class Type(proto.Enum): AUTOMATED = 2 CONTINUOUS = 3 + class QuantityBasedExpiry(proto.Message): + r"""A backup's position in a quantity-based retention queue, of backups + with the same source cluster and type, with length, retention, + specified by the backup's retention policy. Once the position is + greater than the retention, the backup is eligible to be garbage + collected. + + Example: 5 backups from the same source cluster and type with a + quantity-based retention of 3 and denoted by backup_id (position, + retention). + + Safe: backup_5 (1, 3), backup_4, (2, 3), backup_3 (3, 3). Awaiting + garbage collection: backup_2 (4, 3), backup_1 (5, 3) + + Attributes: + retention_count (int): + Output only. The backup's position among its + backups with the same source cluster and type, + by descending chronological order create + time(i.e. newest first). + total_retention_count (int): + Output only. The length of the quantity-based + queue, specified by the backup's retention + policy. 
+ """ + + retention_count: int = proto.Field( + proto.INT32, + number=1, + ) + total_retention_count: int = proto.Field( + proto.INT32, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1711,6 +1796,16 @@ class Type(proto.Enum): number=19, message=timestamp_pb2.Timestamp, ) + expiry_quantity: QuantityBasedExpiry = proto.Field( + proto.MESSAGE, + number=20, + message=QuantityBasedExpiry, + ) + database_version: "DatabaseVersion" = proto.Field( + proto.ENUM, + number=22, + enum="DatabaseVersion", + ) class SupportedDatabaseFlag(proto.Message): diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py index 679ac0a34b92..cbb773f518a8 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py @@ -198,6 +198,7 @@ class CreateSecondaryClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -206,6 +207,7 @@ class CreateSecondaryClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -258,6 +260,7 @@ class CreateClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -266,6 +269,7 @@ class CreateClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -319,6 +323,7 @@ class UpdateClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -327,6 +332,7 @@ class UpdateClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -380,6 +386,7 @@ class DeleteClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -388,6 +395,7 @@ class DeleteClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -443,6 +451,7 @@ class PromoteClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -451,6 +460,7 @@ class PromoteClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -521,6 +531,7 @@ class RestoreClusterRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -529,6 +540,7 @@ class RestoreClusterRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -696,6 +708,7 @@ class CreateInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -704,6 +717,7 @@ class CreateInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -756,6 +770,7 @@ class CreateSecondaryInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -764,6 +779,7 @@ class CreateSecondaryInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -831,6 +847,7 @@ class BatchCreateInstancesRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -839,6 +856,7 @@ class BatchCreateInstancesRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1028,6 +1046,7 @@ class UpdateInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1036,6 +1055,7 @@ class UpdateInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1089,6 +1109,7 @@ class DeleteInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1097,6 +1118,7 @@ class DeleteInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1145,6 +1167,7 @@ class FailoverInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1153,6 +1176,7 @@ class FailoverInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1195,6 +1219,7 @@ class InjectFaultRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1203,6 +1228,7 @@ class InjectFaultRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1260,6 +1286,7 @@ class RestartInstanceRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1268,6 +1295,7 @@ class RestartInstanceRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1396,6 +1424,7 @@ class CreateBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1404,6 +1433,7 @@ class CreateBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1455,6 +1485,7 @@ class UpdateBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1463,6 +1494,7 @@ class UpdateBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1514,6 +1546,7 @@ class DeleteBackupRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -1522,6 +1555,7 @@ class DeleteBackupRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1636,6 +1670,7 @@ class GenerateClientCertificateRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1644,6 +1679,7 @@ class GenerateClientCertificateRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1660,6 +1696,11 @@ class GenerateClientCertificateRequest(proto.Message): an appropriate default duration. public_key (str): Optional. The public key from the client. + use_metadata_exchange (bool): + Optional. An optional hint to the endpoint to + generate a client ceritificate that can be used + by AlloyDB connectors to exchange additional + metadata with the server after TLS handshake. """ parent: str = proto.Field( @@ -1683,6 +1724,10 @@ class GenerateClientCertificateRequest(proto.Message): proto.STRING, number=5, ) + use_metadata_exchange: bool = proto.Field( + proto.BOOL, + number=6, + ) class GenerateClientCertificateResponse(proto.Message): @@ -1723,7 +1768,6 @@ class GetConnectionInfoRequest(proto.Message): parent (str): Required. The name of the parent resource. The required format is: - projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance} request_id (str): Optional. 
An optional request ID to identify @@ -1732,6 +1776,7 @@ class GetConnectionInfoRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1740,6 +1785,7 @@ class GetConnectionInfoRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1937,6 +1983,7 @@ class CreateUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1945,6 +1992,7 @@ class CreateUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1996,6 +2044,7 @@ class UpdateUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -2004,6 +2053,7 @@ class UpdateUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
+ The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -2054,6 +2104,7 @@ class DeleteUserRequest(proto.Message): know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -2062,6 +2113,7 @@ class DeleteUserRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). diff --git a/packages/google-cloud-alloydb/noxfile.py b/packages/google-cloud-alloydb/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-alloydb/noxfile.py +++ b/packages/google-cloud-alloydb/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index f6ee212ef7f8..1ee67cfb80a0 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.0" + "version": "0.3.1" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index 4e642806d678..7667ab8034cf 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.0" + "version": "0.3.1" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index aaee17ce33ad..899d84d0eada 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.3.0" + "version": "0.3.1" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh b/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh index 
21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-alloydb/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py index 33cac3e5c880..14b3ca7826ae 100644 --- a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py @@ -51,7 +51,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), 'delete_user': ('name', 'request_id', 'validate_only', ), 'failover_instance': ('name', 'request_id', 'validate_only', ), - 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', 'public_key', ), + 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', 'public_key', 'use_metadata_exchange', ), 'get_backup': ('name', ), 'get_cluster': ('name', 'view', ), 'get_connection_info': ('parent', 'request_id', ), diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py index 47c3aae1828b..42e409645459 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 
+from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -5741,6 +5741,7 @@ def test_get_backup(request_type, transport: str = "grpc"): reconciling=True, etag="etag_value", size_bytes=1089, + database_version=resources.DatabaseVersion.POSTGRES_13, ) response = client.get_backup(request) @@ -5762,6 +5763,7 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 def test_get_backup_empty_call(): @@ -5809,6 +5811,7 @@ async def test_get_backup_async( reconciling=True, etag="etag_value", size_bytes=1089, + database_version=resources.DatabaseVersion.POSTGRES_13, ) ) response = await client.get_backup(request) @@ -5831,6 +5834,7 @@ async def test_get_backup_async( assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 @pytest.mark.asyncio @@ -9179,6 +9183,10 @@ def test_create_cluster_rest(request_type): "state": 1, "cluster_type": 1, "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, "network": "network_value", "etag": "etag_value", "annotations": {}, @@ -9452,6 +9460,10 @@ def test_create_cluster_rest_bad_request( "state": 1, "cluster_type": 1, "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, "network": "network_value", "etag": "etag_value", "annotations": {}, @@ -9618,6 +9630,10 @@ def test_update_cluster_rest(request_type): "state": 1, "cluster_type": 1, "database_version": 1, + "network_config": { + "network": "network_value", + 
"allocated_ip_range": "allocated_ip_range_value", + }, "network": "network_value", "etag": "etag_value", "annotations": {}, @@ -9871,6 +9887,10 @@ def test_update_cluster_rest_bad_request( "state": 1, "cluster_type": 1, "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, "network": "network_value", "etag": "etag_value", "annotations": {}, @@ -10795,6 +10815,10 @@ def test_create_secondary_cluster_rest(request_type): "state": 1, "cluster_type": 1, "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, "network": "network_value", "etag": "etag_value", "annotations": {}, @@ -11072,6 +11096,10 @@ def test_create_secondary_cluster_rest_bad_request( "state": 1, "cluster_type": 1, "database_version": 1, + "network_config": { + "network": "network_value", + "allocated_ip_range": "allocated_ip_range_value", + }, "network": "network_value", "etag": "etag_value", "annotations": {}, @@ -11881,6 +11909,10 @@ def test_create_instance_rest(request_type): "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -12122,6 +12154,10 @@ def test_create_instance_rest_bad_request( "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -12251,6 +12287,10 @@ def test_create_secondary_instance_rest(request_type): "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -12494,6 +12534,10 @@ def test_create_secondary_instance_rest_bad_request( 
"reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -12628,6 +12672,10 @@ def test_batch_create_instances_rest(request_type): "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, }, "request_id": "request_id_value", "validate_only": True, @@ -12855,6 +12903,10 @@ def test_batch_create_instances_rest_bad_request( "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, }, "request_id": "request_id_value", "validate_only": True, @@ -12932,6 +12984,10 @@ def test_update_instance_rest(request_type): "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -13155,6 +13211,10 @@ def test_update_instance_rest_bad_request( "reconciling": True, "etag": "etag_value", "annotations": {}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -14706,6 +14766,7 @@ def test_get_backup_rest(request_type): reconciling=True, etag="etag_value", size_bytes=1089, + database_version=resources.DatabaseVersion.POSTGRES_13, ) # Wrap the value into a proper Response obj @@ -14731,6 +14792,7 @@ def test_get_backup_rest(request_type): assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): @@ -14993,6 +15055,8 @@ 
def test_create_backup_rest(request_type): "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) @@ -15223,6 +15287,8 @@ def test_create_backup_rest_bad_request( "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) @@ -15342,6 +15408,8 @@ def test_update_backup_rest(request_type): "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) @@ -15552,6 +15620,8 @@ def test_update_backup_rest_bad_request( "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index 496695f7fb36..fc008d94bc7f 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py 
b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index 1da52ef851d0..80a208a78efe 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -5741,6 +5741,7 @@ def test_get_backup(request_type, transport: str = "grpc"): reconciling=True, etag="etag_value", size_bytes=1089, + database_version=resources.DatabaseVersion.POSTGRES_13, ) response = client.get_backup(request) @@ -5762,6 +5763,7 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 def test_get_backup_empty_call(): @@ -5809,6 +5811,7 @@ async def test_get_backup_async( reconciling=True, etag="etag_value", size_bytes=1089, + database_version=resources.DatabaseVersion.POSTGRES_13, ) ) response = await client.get_backup(request) @@ -5831,6 +5834,7 @@ async def test_get_backup_async( assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 @pytest.mark.asyncio @@ -12423,6 +12427,10 @@ def test_create_instance_rest(request_type): "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, 
"ca_source": 1}, + }, } request = request_type(**request_init) @@ -12665,6 +12673,10 @@ def test_create_instance_rest_bad_request( "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -12795,6 +12807,10 @@ def test_create_secondary_instance_rest(request_type): "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -13039,6 +13055,10 @@ def test_create_secondary_instance_rest_bad_request( "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -13174,6 +13194,10 @@ def test_batch_create_instances_rest(request_type): "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, }, "request_id": "request_id_value", "validate_only": True, @@ -13402,6 +13426,10 @@ def test_batch_create_instances_rest_bad_request( "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, }, "request_id": "request_id_value", "validate_only": True, @@ -13480,6 +13508,10 @@ def test_update_instance_rest(request_type): "etag": "etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -13704,6 +13736,10 @@ def test_update_instance_rest_bad_request( "etag": 
"etag_value", "annotations": {}, "update_policy": {"mode": 1}, + "client_connection_config": { + "require_connectors": True, + "ssl_config": {"ssl_mode": 1, "ca_source": 1}, + }, } request = request_type(**request_init) @@ -15256,6 +15292,7 @@ def test_get_backup_rest(request_type): reconciling=True, etag="etag_value", size_bytes=1089, + database_version=resources.DatabaseVersion.POSTGRES_13, ) # Wrap the value into a proper Response obj @@ -15281,6 +15318,7 @@ def test_get_backup_rest(request_type): assert response.reconciling is True assert response.etag == "etag_value" assert response.size_bytes == 1089 + assert response.database_version == resources.DatabaseVersion.POSTGRES_13 def test_get_backup_rest_required_fields(request_type=service.GetBackupRequest): @@ -15544,6 +15582,8 @@ def test_create_backup_rest(request_type): "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) @@ -15774,6 +15814,8 @@ def test_create_backup_rest_bad_request( "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) @@ -15894,6 +15936,8 @@ def test_update_backup_rest(request_type): "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) @@ -16104,6 +16148,8 @@ def test_update_backup_rest_bad_request( "annotations": {}, "size_bytes": 1089, "expiry_time": {}, + "expiry_quantity": {"retention_count": 1632, "total_retention_count": 2275}, + "database_version": 1, } request = request_type(**request_init) diff --git a/packages/google-cloud-assured-workloads/.OwlBot.yaml b/packages/google-cloud-assured-workloads/.OwlBot.yaml new file mode 100644 index 
000000000000..9e3e61e84182 --- /dev/null +++ b/packages/google-cloud-assured-workloads/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/assuredworkloads/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-assured-workloads/$1 + +begin-after-commit-hash: 6a5da3f1274b088752f074da5bc9e30bd1beb27e + diff --git a/packages/google-cloud-assured-workloads/.coveragerc b/packages/google-cloud-assured-workloads/.coveragerc new file mode 100644 index 000000000000..74d92878ed60 --- /dev/null +++ b/packages/google-cloud-assured-workloads/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/assuredworkloads/__init__.py + google/cloud/assuredworkloads/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-assured-workloads/.flake8 b/packages/google-cloud-assured-workloads/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-assured-workloads/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-assured-workloads/.gitignore b/packages/google-cloud-assured-workloads/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-assured-workloads/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-assured-workloads/.repo-metadata.json b/packages/google-cloud-assured-workloads/.repo-metadata.json new file mode 100644 index 000000000000..b7f6e9a854f4 --- /dev/null +++ b/packages/google-cloud-assured-workloads/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "assuredworkloads", + "name_pretty": "Assured Workloads for Government", + "product_documentation": "https://cloud.google.com/assured-workloads/", + "client_documentation": "https://cloud.google.com/python/docs/reference/assuredworkloads/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-assured-workloads", + "api_id": "assuredworkloads.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "assuredworkloads", + "api_description": "allows you to secure your government workloads and accelerate your path to running compliant workloads on Google Cloud with Assured Workloads for Government." 
+} diff --git a/packages/google-cloud-assured-workloads/CHANGELOG.md b/packages/google-cloud-assured-workloads/CHANGELOG.md new file mode 100644 index 000000000000..34cb411b2dc0 --- /dev/null +++ b/packages/google-cloud-assured-workloads/CHANGELOG.md @@ -0,0 +1,355 @@ +# Changelog + +## [1.10.3](https://github.com/googleapis/python-assured-workloads/compare/v1.10.2...v1.10.3) (2023-09-13) + + +### Documentation + +* Minor formatting ([#256](https://github.com/googleapis/python-assured-workloads/issues/256)) ([c73576e](https://github.com/googleapis/python-assured-workloads/commit/c73576e43e8d0f5721b87a22ad7e2332d9c99d82)) + +## [1.10.2](https://github.com/googleapis/python-assured-workloads/compare/v1.10.1...v1.10.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#246](https://github.com/googleapis/python-assured-workloads/issues/246)) ([04dbd20](https://github.com/googleapis/python-assured-workloads/commit/04dbd20319108ba4a504bdde8d4cf012f05f0baa)) + +## [1.10.1](https://github.com/googleapis/python-assured-workloads/compare/v1.10.0...v1.10.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#239](https://github.com/googleapis/python-assured-workloads/issues/239)) ([448999e](https://github.com/googleapis/python-assured-workloads/commit/448999ef82076dd9c6fa51c6c6b2fe864b1030c4)) + +## [1.10.0](https://github.com/googleapis/python-assured-workloads/compare/v1.9.1...v1.10.0) (2023-02-09) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#233](https://github.com/googleapis/python-assured-workloads/issues/233)) ([5acc49a](https://github.com/googleapis/python-assured-workloads/commit/5acc49ae8e8e4cd7d56036b64966e2fa6ca268cc)) + +## [1.9.1](https://github.com/googleapis/python-assured-workloads/compare/v1.9.0...v1.9.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types 
([764fbd9](https://github.com/googleapis/python-assured-workloads/commit/764fbd90e858ca112ed83c0a8f46be5e5327f458)) + + +### Documentation + +* Add documentation for enums ([764fbd9](https://github.com/googleapis/python-assured-workloads/commit/764fbd90e858ca112ed83c0a8f46be5e5327f458)) + +## [1.9.0](https://github.com/googleapis/python-assured-workloads/compare/v1.8.1...v1.9.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#225](https://github.com/googleapis/python-assured-workloads/issues/225)) ([36aab41](https://github.com/googleapis/python-assured-workloads/commit/36aab41aae69539953cc38c574927bdefaeedbfa)) + +## [1.8.1](https://github.com/googleapis/python-assured-workloads/compare/v1.8.0...v1.8.1) (2022-12-08) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([6a0ac97](https://github.com/googleapis/python-assured-workloads/commit/6a0ac977e84b20fea502dc6ffeea8af160c34411)) +* Drop usage of pkg_resources ([6a0ac97](https://github.com/googleapis/python-assured-workloads/commit/6a0ac977e84b20fea502dc6ffeea8af160c34411)) +* Fix timeout default values ([6a0ac97](https://github.com/googleapis/python-assured-workloads/commit/6a0ac977e84b20fea502dc6ffeea8af160c34411)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([6a0ac97](https://github.com/googleapis/python-assured-workloads/commit/6a0ac977e84b20fea502dc6ffeea8af160c34411)) + +## [1.8.0](https://github.com/googleapis/python-assured-workloads/compare/v1.7.0...v1.8.0) (2022-11-16) + + +### Features + +* add support for `google.cloud.assuredworkloads.__version__` ([58b4377](https://github.com/googleapis/python-assured-workloads/commit/58b4377d7fdd56bca565df1bf2b4be8fb77c4c7f)) +* Add typing to proto.Message based class attributes ([3f09fb8](https://github.com/googleapis/python-assured-workloads/commit/3f09fb885e12d866edefabe18e7e02ca5b8cee20)) + + +### Bug Fixes + +* Add dict typing for 
client_options ([58b4377](https://github.com/googleapis/python-assured-workloads/commit/58b4377d7fdd56bca565df1bf2b4be8fb77c4c7f)) +* **deps:** require google-api-core >=1.33.2 ([58b4377](https://github.com/googleapis/python-assured-workloads/commit/58b4377d7fdd56bca565df1bf2b4be8fb77c4c7f)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([3f09fb8](https://github.com/googleapis/python-assured-workloads/commit/3f09fb885e12d866edefabe18e7e02ca5b8cee20)) + +## [1.7.0](https://github.com/googleapis/python-assured-workloads/compare/v1.6.1...v1.7.0) (2022-10-14) + + +### Features + +* Add new field for exception audit log link ([#211](https://github.com/googleapis/python-assured-workloads/issues/211)) ([9fafdd4](https://github.com/googleapis/python-assured-workloads/commit/9fafdd4317417673ca86727d7fdc603e7ba1bb35)) + +## [1.6.1](https://github.com/googleapis/python-assured-workloads/compare/v1.6.0...v1.6.1) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#209](https://github.com/googleapis/python-assured-workloads/issues/209)) ([a420318](https://github.com/googleapis/python-assured-workloads/commit/a420318a79c80e0be11c335475f8f759df92a93f)) + +## [1.6.0](https://github.com/googleapis/python-assured-workloads/compare/v1.5.0...v1.6.0) (2022-10-03) + + +### Features + +* Add apis for AssuredWorkload monitoring feature and to restrict allowed resources ([#207](https://github.com/googleapis/python-assured-workloads/issues/207)) ([a38f6af](https://github.com/googleapis/python-assured-workloads/commit/a38f6af69bd5d2e17493de56ed7a5e26660be9f8)) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#206](https://github.com/googleapis/python-assured-workloads/issues/206)) ([daf4770](https://github.com/googleapis/python-assured-workloads/commit/daf4770a8acdffd7265a39ee7bf82d9eada106db)) + +## [1.5.0](https://github.com/googleapis/python-assured-workloads/compare/v1.4.2...v1.5.0) (2022-09-02) + + +### Features + +* 
Add compliant_but_disallowed_services field to the v1beta1 Workload proto ([#201](https://github.com/googleapis/python-assured-workloads/issues/201)) ([a8c4a1a](https://github.com/googleapis/python-assured-workloads/commit/a8c4a1a11b2c51786fb1fd6ab6e7e99696c92646)) + +## [1.4.2](https://github.com/googleapis/python-assured-workloads/compare/v1.4.1...v1.4.2) (2022-08-24) + + +### Bug Fixes + +* **v1beta1:** Removed `restrict_allowed_services`, `RestrictAllowedServicesRequest`, `RestrictAllowedServicesResponse` ([b07a36a](https://github.com/googleapis/python-assured-workloads/commit/b07a36abb42f6232dcb0a0df7a4211437b3f830e)) + + +### Documentation + +* **v1beta1:** Update analyzeWorkloadMove documentation ([b07a36a](https://github.com/googleapis/python-assured-workloads/commit/b07a36abb42f6232dcb0a0df7a4211437b3f830e)) + +## [1.4.1](https://github.com/googleapis/python-assured-workloads/compare/v1.4.0...v1.4.1) (2022-08-15) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#186](https://github.com/googleapis/python-assured-workloads/issues/186)) ([ae92f13](https://github.com/googleapis/python-assured-workloads/commit/ae92f13919d7ca2d9415e899a78f18914403a1e2)) +* **deps:** require proto-plus >= 1.22.0 ([ae92f13](https://github.com/googleapis/python-assured-workloads/commit/ae92f13919d7ca2d9415e899a78f18914403a1e2)) + +## [1.4.0](https://github.com/googleapis/python-assured-workloads/compare/v1.3.0...v1.4.0) (2022-07-19) + + +### Features + +* **v1beta1:** AnalyzeWorkloadMove returns information about org policy differences between the project and target folder ([7afe126](https://github.com/googleapis/python-assured-workloads/commit/7afe126739e9be49f1af5b499f35873f9722e721)) +* **v1beta1:** Update method signature of analyzeWorkloadMove to accept project as source ([#182](https://github.com/googleapis/python-assured-workloads/issues/182)) ([7afe126](https://github.com/googleapis/python-assured-workloads/commit/7afe126739e9be49f1af5b499f35873f9722e721)) + +## 
[1.3.0](https://github.com/googleapis/python-assured-workloads/compare/v1.2.3...v1.3.0) (2022-07-16) + + +### Features + +* add audience parameter ([ea32aaa](https://github.com/googleapis/python-assured-workloads/commit/ea32aaa717ef0e2fce009b207fb55c0dd2aa358e)) +* ITAR June Preview Launch ([ea32aaa](https://github.com/googleapis/python-assured-workloads/commit/ea32aaa717ef0e2fce009b207fb55c0dd2aa358e)) +* **v1beta1:** Removed _v1beta1 suffix from proto file names ([#174](https://github.com/googleapis/python-assured-workloads/issues/174)) ([03d456b](https://github.com/googleapis/python-assured-workloads/commit/03d456b6dbe5968abc07161f5146fbe77f79a527)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#177](https://github.com/googleapis/python-assured-workloads/issues/177)) ([ea32aaa](https://github.com/googleapis/python-assured-workloads/commit/ea32aaa717ef0e2fce009b207fb55c0dd2aa358e)) +* require python 3.7+ ([#179](https://github.com/googleapis/python-assured-workloads/issues/179)) ([5a92db8](https://github.com/googleapis/python-assured-workloads/commit/5a92db8797ee2e489c4fa38c36eab315aadc84b2)) + +## [1.2.3](https://github.com/googleapis/python-assured-workloads/compare/v1.2.2...v1.2.3) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#166](https://github.com/googleapis/python-assured-workloads/issues/166)) ([8395361](https://github.com/googleapis/python-assured-workloads/commit/8395361fe0258c98b2430f36eb0789c976a977da)) + + +### Documentation + +* fix changelog header to consistent size ([#167](https://github.com/googleapis/python-assured-workloads/issues/167)) ([ace4d91](https://github.com/googleapis/python-assured-workloads/commit/ace4d9142a1c8721419aca0a0661f9f654cd352c)) + +## [1.2.2](https://github.com/googleapis/python-assured-workloads/compare/v1.2.1...v1.2.2) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 
([#136](https://github.com/googleapis/python-assured-workloads/issues/136)) ([a287c38](https://github.com/googleapis/python-assured-workloads/commit/a287c38712eb08ef30f80e5ab64203926cb8f12a)) +* **deps:** require proto-plus>=1.15.0 ([a287c38](https://github.com/googleapis/python-assured-workloads/commit/a287c38712eb08ef30f80e5ab64203926cb8f12a)) + +## [1.2.1](https://github.com/googleapis/python-assured-workloads/compare/v1.2.0...v1.2.1) (2022-02-26) + + +### Documentation + +* add autogenerated code snippets ([70e74a3](https://github.com/googleapis/python-assured-workloads/commit/70e74a3f8f28f43171045da3861b4c8e92a45031)) + +## [1.2.0](https://github.com/googleapis/python-assured-workloads/compare/v1.1.0...v1.2.0) (2022-02-04) + + +### Features + +* add api key support ([#120](https://github.com/googleapis/python-assured-workloads/issues/120)) ([4826ab9](https://github.com/googleapis/python-assured-workloads/commit/4826ab9bc46c4eff4be6faf03c276f4506d154b7)) + +## [1.1.0](https://github.com/googleapis/python-assured-workloads/compare/v1.0.0...v1.1.0) (2022-01-13) + + +### Features + +* EU Regions and Support With Sovereign Controls ([#110](https://github.com/googleapis/python-assured-workloads/issues/110)) ([a0676ef](https://github.com/googleapis/python-assured-workloads/commit/a0676ef0f3d35d27886b7e624973de7942b34214)) + +## [1.0.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.8.0...v1.0.0) (2021-11-01) + + +### Features + +* bump release level to production/stable ([#92](https://www.github.com/googleapis/python-assured-workloads/issues/92)) ([c346fbb](https://www.github.com/googleapis/python-assured-workloads/commit/c346fbb3c3c4388100ba08d5a30889e96727e351)) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([2bc0174](https://www.github.com/googleapis/python-assured-workloads/commit/2bc01744f9bbc48f3e5d1de1dd196571fc1494db)) +* **deps:** require google-api-core >= 1.28.0 
([2bc0174](https://www.github.com/googleapis/python-assured-workloads/commit/2bc01744f9bbc48f3e5d1de1dd196571fc1494db)) + + +### Documentation + +* list oneofs in docstring ([2bc0174](https://www.github.com/googleapis/python-assured-workloads/commit/2bc01744f9bbc48f3e5d1de1dd196571fc1494db)) + +## [0.8.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.7.2...v0.8.0) (2021-10-11) + + +### Features + +* add context manager support in client ([#94](https://www.github.com/googleapis/python-assured-workloads/issues/94)) ([1dd0e98](https://www.github.com/googleapis/python-assured-workloads/commit/1dd0e982ae7268996ecbf534ed310e446f2d0070)) +* add trove classifier for python 3.10 ([#97](https://www.github.com/googleapis/python-assured-workloads/issues/97)) ([4db7fe0](https://www.github.com/googleapis/python-assured-workloads/commit/4db7fe0f9655b09c47eef5142a8c0a7ba0e270d9)) + +## [0.7.2](https://www.github.com/googleapis/python-assured-workloads/compare/v0.7.1...v0.7.2) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([a78556b](https://www.github.com/googleapis/python-assured-workloads/commit/a78556b3481b7e000a209ce0495b52495769cb9a)) + +## [0.7.1](https://www.github.com/googleapis/python-assured-workloads/compare/v0.7.0...v0.7.1) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([5c4dd09](https://www.github.com/googleapis/python-assured-workloads/commit/5c4dd0988b444175a3e6cc5c4b3e183d56cefaba)) + +## [0.7.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.6.0...v0.7.0) (2021-09-20) + + +### Features + +* assuredworkloads v1 public protos ([#82](https://www.github.com/googleapis/python-assured-workloads/issues/82)) ([04dd627](https://www.github.com/googleapis/python-assured-workloads/commit/04dd627d5d3862b055d661c2a1bf9a0f6b5fc4e4)) + +## [0.6.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.5.0...v0.6.0) (2021-08-30) + + +### Features + +* Add Canada 
regions and support compliance regime ([#73](https://www.github.com/googleapis/python-assured-workloads/issues/73)) ([b62f7c7](https://www.github.com/googleapis/python-assured-workloads/commit/b62f7c720d198741673cc93d452d0ac9067cd3c3)) +* display_name is added to ResourceSettings ([b62f7c7](https://www.github.com/googleapis/python-assured-workloads/commit/b62f7c720d198741673cc93d452d0ac9067cd3c3)) +* resource_settings is added to CreateWorkloadOperationMetadata ([b62f7c7](https://www.github.com/googleapis/python-assured-workloads/commit/b62f7c720d198741673cc93d452d0ac9067cd3c3)) +* ResourceType CONSUMER_FOLDER and KEYRING are added ([b62f7c7](https://www.github.com/googleapis/python-assured-workloads/commit/b62f7c720d198741673cc93d452d0ac9067cd3c3)) + + +### Bug Fixes + +* billing_account is now optional in Workload ([b62f7c7](https://www.github.com/googleapis/python-assured-workloads/commit/b62f7c720d198741673cc93d452d0ac9067cd3c3)) +* ResourceType CONSUMER_PROJECT is deprecated ([b62f7c7](https://www.github.com/googleapis/python-assured-workloads/commit/b62f7c720d198741673cc93d452d0ac9067cd3c3)) + +## [0.5.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.4.2...v0.5.0) (2021-07-28) + + +### Features + +* Add EU Regions And Support compliance regime ([#67](https://www.github.com/googleapis/python-assured-workloads/issues/67)) ([a370ad5](https://www.github.com/googleapis/python-assured-workloads/commit/a370ad5c1c7525544f3e5a83e84e0c05ed1851e2)) + +## [0.4.2](https://www.github.com/googleapis/python-assured-workloads/compare/v0.4.1...v0.4.2) (2021-07-27) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#64](https://www.github.com/googleapis/python-assured-workloads/issues/64)) ([c7e4331](https://www.github.com/googleapis/python-assured-workloads/commit/c7e43317be9e68508449a0f9cb548d1bd5904f1e)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst 
([#60](https://www.github.com/googleapis/python-assured-workloads/issues/60)) ([b161d65](https://www.github.com/googleapis/python-assured-workloads/commit/b161d658c8cdf294f72181b368e9e8df3529c392)) + + +### Miscellaneous Chores + +* release as 0.4.2 ([#65](https://www.github.com/googleapis/python-assured-workloads/issues/65)) ([8f8f538](https://www.github.com/googleapis/python-assured-workloads/commit/8f8f53852fd2e3ae4a917cdd7c37125fb01043a4)) + +## [0.4.1](https://www.github.com/googleapis/python-assured-workloads/compare/v0.4.0...v0.4.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#59](https://www.github.com/googleapis/python-assured-workloads/issues/59)) ([5113968](https://www.github.com/googleapis/python-assured-workloads/commit/5113968fa3e779a1e1d69f3642d9cd2f7ebcbe91)) + +## [0.4.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.3.1...v0.4.0) (2021-06-30) + + +### Features + +* add always_use_jwt_access ([#52](https://www.github.com/googleapis/python-assured-workloads/issues/52)) ([9533d55](https://www.github.com/googleapis/python-assured-workloads/commit/9533d55b45ca854800cd2a15c136dc0247465fea)) + + +### Bug Fixes + +* disable always_use_jwt_access ([efac3ed](https://www.github.com/googleapis/python-assured-workloads/commit/efac3eddda13b62f01a451e0314b544d0f97cac8)) +* disable always_use_jwt_access ([#56](https://www.github.com/googleapis/python-assured-workloads/issues/56)) ([efac3ed](https://www.github.com/googleapis/python-assured-workloads/commit/efac3eddda13b62f01a451e0314b544d0f97cac8)) + + +### Documentation + +* fix typo in docs/index.rst ([#43](https://www.github.com/googleapis/python-assured-workloads/issues/43)) ([df2ea64](https://www.github.com/googleapis/python-assured-workloads/commit/df2ea6472b097b53ee7c278051ad4bd11e85ef7b)) +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' 
([#1127](https://www.github.com/googleapis/python-assured-workloads/issues/1127)) ([#47](https://www.github.com/googleapis/python-assured-workloads/issues/47)) ([0f28736](https://www.github.com/googleapis/python-assured-workloads/commit/0f28736ad7d1966f41410d5d571fb56b6fef91df)) + +## [0.3.1](https://www.github.com/googleapis/python-assured-workloads/compare/v0.3.0...v0.3.1) (2021-06-16) + + +### Bug Fixes + +* exclude docs and tests from package ([#44](https://www.github.com/googleapis/python-assured-workloads/issues/44)) ([d3dda4c](https://www.github.com/googleapis/python-assured-workloads/commit/d3dda4c019cc5fa8877b59d8454273f841a73d88)) + +## [0.3.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.2.1...v0.3.0) (2021-05-28) + + +### Features + +* Add 'resource_settings' field to provide custom properties (ids) for the provisioned projects ([6ff8af6](https://www.github.com/googleapis/python-assured-workloads/commit/6ff8af6abc18d74d624e71b547f921b444435310)) +* add HIPAA and HITRUST compliance regimes ([#13](https://www.github.com/googleapis/python-assured-workloads/issues/13)) ([6ff8af6](https://www.github.com/googleapis/python-assured-workloads/commit/6ff8af6abc18d74d624e71b547f921b444435310)) +* support self-signed JWT flow for service accounts ([a28c728](https://www.github.com/googleapis/python-assured-workloads/commit/a28c728c4f8f50a3e5300d1cbfa7ed7262db1f9c)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([a28c728](https://www.github.com/googleapis/python-assured-workloads/commit/a28c728c4f8f50a3e5300d1cbfa7ed7262db1f9c)) +* **deps:** add packaging requirement ([#37](https://www.github.com/googleapis/python-assured-workloads/issues/37)) ([ae6197c](https://www.github.com/googleapis/python-assured-workloads/commit/ae6197cb4761e2c7d1cab80721d7f3b0c16375f1)) +* fix retry deadlines ([6ff8af6](https://www.github.com/googleapis/python-assured-workloads/commit/6ff8af6abc18d74d624e71b547f921b444435310)) + +## 
[0.2.1](https://www.github.com/googleapis/python-assured-workloads/compare/v0.2.0...v0.2.1) (2021-02-11) + + +### Bug Fixes + +* remove client recv msg limit fix: add enums to `types/__init__.py` ([#9](https://www.github.com/googleapis/python-assured-workloads/issues/9)) ([ebd9505](https://www.github.com/googleapis/python-assured-workloads/commit/ebd950596feaa2ebd90334a0ace89f70ce76b381)) + +## [0.2.0](https://www.github.com/googleapis/python-assured-workloads/compare/v0.1.0...v0.2.0) (2020-11-17) + + +### Features + +* add ``provisioned_resources_parent`` and ``kms_settings``; add common resource path helper methods ([daaff1f](https://www.github.com/googleapis/python-assured-workloads/commit/daaff1f32d3a1a44f0ba27ab3ecf4f8f0fbb6d3f)) + +## 0.1.0 (2020-10-02) + + +### Features + +* generate v1beta1 ([999fa05](https://www.github.com/googleapis/python-assured-workloads/commit/999fa05075110ef9f915d08427731482e2bfc373)) diff --git a/packages/google-cloud-assured-workloads/CODE_OF_CONDUCT.md b/packages/google-cloud-assured-workloads/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-assured-workloads/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-assured-workloads/CONTRIBUTING.rst b/packages/google-cloud-assured-workloads/CONTRIBUTING.rst new file mode 100644 index 000000000000..7db6209f1366 --- /dev/null +++ b/packages/google-cloud-assured-workloads/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_environments>`__. Some tests require a service account. 
For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. 
+ +.. _description on PyPI: https://pypi.org/project/google-cloud-assured-workloads + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-assured-workloads/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-cloud-assured-workloads/LICENSE b/packages/google-cloud-assured-workloads/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-assured-workloads/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-assured-workloads/MANIFEST.in b/packages/google-cloud-assured-workloads/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-assured-workloads/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-assured-workloads/README.rst b/packages/google-cloud-assured-workloads/README.rst new file mode 100644 index 000000000000..59665a4cb254 --- /dev/null +++ b/packages/google-cloud-assured-workloads/README.rst @@ -0,0 +1,108 @@ +Python Client for Assured Workloads for Government +================================================== + +|stable| |pypi| |versions| + +`Assured Workloads for Government`_: allows you to secure your government workloads and accelerate your path to running compliant workloads on Google Cloud with Assured Workloads for Government. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-assured-workloads.svg + :target: https://pypi.org/project/google-cloud-assured-workloads/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-assured-workloads.svg + :target: https://pypi.org/project/google-cloud-assured-workloads/ +.. _Assured Workloads for Government: https://cloud.google.com/assured-workloads/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/assuredworkloads/latest +.. _Product Documentation: https://cloud.google.com/assured-workloads/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. 
`Enable the Assured Workloads for Government.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Assured Workloads for Government.: https://cloud.google.com/assured-workloads/ +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-assured-workloads + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-assured-workloads + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Assured Workloads for Government + to see other available methods on the client. +- Read the `Assured Workloads for Government Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Assured Workloads for Government Product documentation: https://cloud.google.com/assured-workloads/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-assured-workloads/SECURITY.md b/packages/google-cloud-assured-workloads/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-assured-workloads/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
diff --git a/packages/google-cloud-assured-workloads/assuredworkloads-v1-py.tar.gz b/packages/google-cloud-assured-workloads/assuredworkloads-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-assured-workloads/docs/CHANGELOG.md b/packages/google-cloud-assured-workloads/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-assured-workloads/docs/README.rst b/packages/google-cloud-assured-workloads/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-assured-workloads/docs/_static/custom.css b/packages/google-cloud-assured-workloads/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-assured-workloads/docs/_templates/layout.html b/packages/google-cloud-assured-workloads/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/assured_workloads_service.rst b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/assured_workloads_service.rst new file mode 100644 index 000000000000..dd32bca5e24d --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/assured_workloads_service.rst @@ -0,0 +1,10 @@ +AssuredWorkloadsService +----------------------------------------- + +.. automodule:: google.cloud.assuredworkloads_v1.services.assured_workloads_service + :members: + :inherited-members: + +.. automodule:: google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/services.rst b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/services.rst new file mode 100644 index 000000000000..18a2c7c84d8e --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Assuredworkloads v1 API +================================================= +.. toctree:: + :maxdepth: 2 + + assured_workloads_service diff --git a/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/types.rst b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/types.rst new file mode 100644 index 000000000000..5ecad0d4d824 --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Assuredworkloads v1 API +============================================== + +.. 
automodule:: google.cloud.assuredworkloads_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/assured_workloads_service.rst b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/assured_workloads_service.rst new file mode 100644 index 000000000000..bd798cb3fa32 --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/assured_workloads_service.rst @@ -0,0 +1,10 @@ +AssuredWorkloadsService +----------------------------------------- + +.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service + :members: + :inherited-members: + +.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/services.rst b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/services.rst new file mode 100644 index 000000000000..abfac62c50d5 --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Assuredworkloads v1beta1 API +====================================================== +.. toctree:: + :maxdepth: 2 + + assured_workloads_service diff --git a/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/types.rst b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/types.rst new file mode 100644 index 000000000000..070395b22dfb --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/assuredworkloads_v1beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Assuredworkloads v1beta1 API +=================================================== + +.. 
automodule:: google.cloud.assuredworkloads_v1beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-assured-workloads/docs/conf.py b/packages/google-cloud-assured-workloads/docs/conf.py new file mode 100644 index 000000000000..da70400324dd --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-assured-workloads documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-assured-workloads" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-assured-workloads", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-assured-workloads-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-assured-workloads.tex", + "google-cloud-assured-workloads Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-assured-workloads", + "google-cloud-assured-workloads Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-assured-workloads", + "google-cloud-assured-workloads Documentation", + author, + "google-cloud-assured-workloads", + "google-cloud-assured-workloads Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-assured-workloads/docs/index.rst b/packages/google-cloud-assured-workloads/docs/index.rst new file mode 100644 index 000000000000..8534aa30a00c --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. 
include:: multiprocessing.rst + +This package includes clients for multiple versions of Assured Workloads for Government. +By default, you will get version ``assuredworkloads_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + assuredworkloads_v1/services + assuredworkloads_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + assuredworkloads_v1beta1/services + assuredworkloads_v1beta1/types + + +Changelog +--------- + +For a list of all ``google-cloud-assured-workloads`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-assured-workloads/docs/multiprocessing.rst b/packages/google-cloud-assured-workloads/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-assured-workloads/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/__init__.py new file mode 100644 index 000000000000..bf5f9b1b6a52 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/__init__.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.assuredworkloads import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.assuredworkloads_v1.services.assured_workloads_service.async_client import ( + AssuredWorkloadsServiceAsyncClient, +) +from google.cloud.assuredworkloads_v1.services.assured_workloads_service.client import ( + AssuredWorkloadsServiceClient, +) +from google.cloud.assuredworkloads_v1.types.assuredworkloads import ( + AcknowledgeViolationRequest, + AcknowledgeViolationResponse, + CreateWorkloadOperationMetadata, + CreateWorkloadRequest, + DeleteWorkloadRequest, + GetViolationRequest, + GetWorkloadRequest, + ListViolationsRequest, + ListViolationsResponse, + ListWorkloadsRequest, + ListWorkloadsResponse, + RestrictAllowedResourcesRequest, + RestrictAllowedResourcesResponse, + TimeWindow, + UpdateWorkloadRequest, + Violation, + Workload, +) + +__all__ = ( + "AssuredWorkloadsServiceClient", + "AssuredWorkloadsServiceAsyncClient", + "AcknowledgeViolationRequest", + "AcknowledgeViolationResponse", + "CreateWorkloadOperationMetadata", + "CreateWorkloadRequest", + "DeleteWorkloadRequest", + "GetViolationRequest", + "GetWorkloadRequest", + "ListViolationsRequest", + "ListViolationsResponse", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "TimeWindow", + "UpdateWorkloadRequest", + "Violation", + "Workload", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py 
b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py new file mode 100644 index 000000000000..2d55bb3824da --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.10.3" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/py.typed b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/py.typed new file mode 100644 index 000000000000..3762b50eeb3d --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-assured-workloads package uses inline types. diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/__init__.py new file mode 100644 index 000000000000..17f549033a63 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/__init__.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.assuredworkloads_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.assured_workloads_service import ( + AssuredWorkloadsServiceAsyncClient, + AssuredWorkloadsServiceClient, +) +from .types.assuredworkloads import ( + AcknowledgeViolationRequest, + AcknowledgeViolationResponse, + CreateWorkloadOperationMetadata, + CreateWorkloadRequest, + DeleteWorkloadRequest, + GetViolationRequest, + GetWorkloadRequest, + ListViolationsRequest, + ListViolationsResponse, + ListWorkloadsRequest, + ListWorkloadsResponse, + RestrictAllowedResourcesRequest, + RestrictAllowedResourcesResponse, + TimeWindow, + UpdateWorkloadRequest, + Violation, + Workload, +) + +__all__ = ( + "AssuredWorkloadsServiceAsyncClient", + "AcknowledgeViolationRequest", + "AcknowledgeViolationResponse", + "AssuredWorkloadsServiceClient", + "CreateWorkloadOperationMetadata", + "CreateWorkloadRequest", + "DeleteWorkloadRequest", + "GetViolationRequest", + "GetWorkloadRequest", + "ListViolationsRequest", + "ListViolationsResponse", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "TimeWindow", + "UpdateWorkloadRequest", + "Violation", + "Workload", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_metadata.json b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_metadata.json new file mode 100644 index 000000000000..6cd64bb258dd --- /dev/null +++ 
b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_metadata.json @@ -0,0 +1,163 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.assuredworkloads_v1", + "protoPackage": "google.cloud.assuredworkloads.v1", + "schema": "1.0", + "services": { + "AssuredWorkloadsService": { + "clients": { + "grpc": { + "libraryClient": "AssuredWorkloadsServiceClient", + "rpcs": { + "AcknowledgeViolation": { + "methods": [ + "acknowledge_violation" + ] + }, + "CreateWorkload": { + "methods": [ + "create_workload" + ] + }, + "DeleteWorkload": { + "methods": [ + "delete_workload" + ] + }, + "GetViolation": { + "methods": [ + "get_violation" + ] + }, + "GetWorkload": { + "methods": [ + "get_workload" + ] + }, + "ListViolations": { + "methods": [ + "list_violations" + ] + }, + "ListWorkloads": { + "methods": [ + "list_workloads" + ] + }, + "RestrictAllowedResources": { + "methods": [ + "restrict_allowed_resources" + ] + }, + "UpdateWorkload": { + "methods": [ + "update_workload" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AssuredWorkloadsServiceAsyncClient", + "rpcs": { + "AcknowledgeViolation": { + "methods": [ + "acknowledge_violation" + ] + }, + "CreateWorkload": { + "methods": [ + "create_workload" + ] + }, + "DeleteWorkload": { + "methods": [ + "delete_workload" + ] + }, + "GetViolation": { + "methods": [ + "get_violation" + ] + }, + "GetWorkload": { + "methods": [ + "get_workload" + ] + }, + "ListViolations": { + "methods": [ + "list_violations" + ] + }, + "ListWorkloads": { + "methods": [ + "list_workloads" + ] + }, + "RestrictAllowedResources": { + "methods": [ + "restrict_allowed_resources" + ] + }, + "UpdateWorkload": { + "methods": [ + "update_workload" + ] + } + } + }, + "rest": { + "libraryClient": "AssuredWorkloadsServiceClient", + "rpcs": { + "AcknowledgeViolation": { + "methods": [ + "acknowledge_violation" + ] + }, + 
"CreateWorkload": { + "methods": [ + "create_workload" + ] + }, + "DeleteWorkload": { + "methods": [ + "delete_workload" + ] + }, + "GetViolation": { + "methods": [ + "get_violation" + ] + }, + "GetWorkload": { + "methods": [ + "get_workload" + ] + }, + "ListViolations": { + "methods": [ + "list_violations" + ] + }, + "ListWorkloads": { + "methods": [ + "list_workloads" + ] + }, + "RestrictAllowedResources": { + "methods": [ + "restrict_allowed_resources" + ] + }, + "UpdateWorkload": { + "methods": [ + "update_workload" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py new file mode 100644 index 000000000000..2d55bb3824da --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.10.3" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/py.typed b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/py.typed new file mode 100644 index 000000000000..3762b50eeb3d --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. 
+# The google-cloud-assured-workloads package uses inline types. diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/__init__.py new file mode 100644 index 000000000000..963d19bb2128 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AssuredWorkloadsServiceAsyncClient +from .client import AssuredWorkloadsServiceClient + +__all__ = ( + "AssuredWorkloadsServiceClient", + "AssuredWorkloadsServiceAsyncClient", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py new file mode 100644 index 000000000000..dbaf532b1caf --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py @@ -0,0 +1,1302 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.assuredworkloads_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1.services.assured_workloads_service import pagers +from google.cloud.assuredworkloads_v1.types import assuredworkloads + +from .client import AssuredWorkloadsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport +from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport + + +class AssuredWorkloadsServiceAsyncClient: + """Service to manage AssuredWorkloads.""" + + _client: AssuredWorkloadsServiceClient + + DEFAULT_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT + + violation_path = staticmethod(AssuredWorkloadsServiceClient.violation_path) + parse_violation_path = staticmethod( + AssuredWorkloadsServiceClient.parse_violation_path + ) + workload_path = 
staticmethod(AssuredWorkloadsServiceClient.workload_path) + parse_workload_path = staticmethod( + AssuredWorkloadsServiceClient.parse_workload_path + ) + common_billing_account_path = staticmethod( + AssuredWorkloadsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AssuredWorkloadsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AssuredWorkloadsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + AssuredWorkloadsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + AssuredWorkloadsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceAsyncClient: The constructed client. + """ + return AssuredWorkloadsServiceClient.from_service_account_info.__func__(AssuredWorkloadsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceAsyncClient: The constructed client. + """ + return AssuredWorkloadsServiceClient.from_service_account_file.__func__(AssuredWorkloadsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return AssuredWorkloadsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AssuredWorkloadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AssuredWorkloadsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AssuredWorkloadsServiceClient).get_transport_class, + type(AssuredWorkloadsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AssuredWorkloadsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the assured workloads service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AssuredWorkloadsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AssuredWorkloadsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_workload( + self, + request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, + *, + parent: Optional[str] = None, + workload: Optional[assuredworkloads.Workload] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates Assured Workload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_create_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" + + request = assuredworkloads_v1.CreateWorkloadRequest( + parent="parent_value", + workload=workload, + ) + + # Make the request + operation = client.create_workload(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.CreateWorkloadRequest, dict]]): + The request object. Request for creating a workload. + parent (:class:`str`): + Required. The resource name of the new Workload's + parent. Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workload (:class:`google.cloud.assuredworkloads_v1.types.Workload`): + Required. Assured Workload to create + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.assuredworkloads_v1.types.Workload` A Workload object for managing highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workload]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.CreateWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workload is not None: + request.workload = workload + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workload, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + assuredworkloads.Workload, + metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_workload( + self, + request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, + *, + workload: Optional[assuredworkloads.Workload] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_update_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" + + request = assuredworkloads_v1.UpdateWorkloadRequest( + workload=workload, + ) + + # Make the request + response = await client.update_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.UpdateWorkloadRequest, dict]]): + The request object. Request for Updating a workload. + workload (:class:`google.cloud.assuredworkloads_v1.types.Workload`): + Required. The workload to update. 
The workload's + ``name`` field is used to identify the workload to be + updated. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.Workload: + A Workload object for managing highly + regulated workloads of cloud customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workload, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.UpdateWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workload is not None: + request.workload = workload + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workload, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workload.name", request.workload.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def restrict_allowed_resources( + self, + request: Optional[ + Union[assuredworkloads.RestrictAllowedResourcesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + r"""Restrict the list of resources allowed in the + Workload environment. The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_restrict_allowed_resources(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.RestrictAllowedResourcesRequest( + name="name_value", + restriction_type="ALLOW_COMPLIANT_RESOURCES", + ) + + # Make the request + response = await client.restrict_allowed_resources(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest, dict]]): + The request object. Request for restricting list of + available resources in Workload + environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesResponse: + Response for restricting the list of + allowed resources. + + """ + # Create or coerce a protobuf request object. + request = assuredworkloads.RestrictAllowedResourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restrict_allowed_resources, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_workload( + self, + request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_delete_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.DeleteWorkloadRequest( + name="name_value", + ) + + # Make the request + await client.delete_workload(request=request) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.DeleteWorkloadRequest, dict]]): + The request object. Request for deleting a Workload. + name (:class:`str`): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.DeleteWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workload, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_workload( + self, + request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Gets Assured Workload associated with a CRM Node + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_get_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.GetWorkloadRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.GetWorkloadRequest, dict]]): + The request object. Request for fetching a workload. + name (:class:`str`): + Required. The resource name of the Workload to fetch. + This is the workload's relative path in the API, + formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.Workload: + A Workload object for managing highly + regulated workloads of cloud customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = assuredworkloads.GetWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workload, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workloads( + self, + request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkloadsAsyncPager: + r"""Lists Assured Workloads under a CRM Node. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_list_workloads(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.ListWorkloadsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workloads(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest, dict]]): + The request object. Request for fetching workloads in an + organization. + parent (:class:`str`): + Required. Parent Resource to list workloads from. Must + be of the form + ``organizations/{org_id}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager: + Response of ListWorkloads endpoint. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = assuredworkloads.ListWorkloadsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workloads, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkloadsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_violations( + self, + request: Optional[Union[assuredworkloads.ListViolationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViolationsAsyncPager: + r"""Lists the Violations in the AssuredWorkload Environment. Callers + may also choose to read across multiple Workloads as per + `AIP-159 `__ by using '-' (the + hyphen or dash character) as a wildcard character instead of + workload-id in the parent. Format + ``organizations/{org_id}/locations/{location}/workloads/-`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_list_violations(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.ListViolationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_violations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.ListViolationsRequest, dict]]): + The request object. Request for fetching violations in an + organization. + parent (:class:`str`): + Required. The Workload name. Format + ``organizations/{org_id}/locations/{location}/workloads/{workload}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListViolationsAsyncPager: + Response of ListViolations endpoint. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = assuredworkloads.ListViolationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_violations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListViolationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_violation( + self, + request: Optional[Union[assuredworkloads.GetViolationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Violation: + r"""Retrieves Assured Workload Violation based on ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_get_violation(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.GetViolationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_violation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.GetViolationRequest, dict]]): + The request object. Request for fetching a Workload + Violation. + name (:class:`str`): + Required. The resource name of the + Violation to fetch (ie. Violation.name). + Format: + + organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.Violation: + Workload monitoring Violation. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.GetViolationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_violation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def acknowledge_violation( + self, + request: Optional[ + Union[assuredworkloads.AcknowledgeViolationRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.AcknowledgeViolationResponse: + r"""Acknowledges an existing violation. By acknowledging + a violation, users acknowledge the existence of a + compliance violation in their workload and decide to + ignore it due to a valid business justification. + Acknowledgement is a permanent operation and it cannot + be reverted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + async def sample_acknowledge_violation(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.AcknowledgeViolationRequest( + name="name_value", + comment="comment_value", + ) + + # Make the request + response = await client.acknowledge_violation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1.types.AcknowledgeViolationRequest, dict]]): + The request object. Request for acknowledging the + violation Next Id: 4 + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.AcknowledgeViolationResponse: + Response for violation + acknowledgement + + """ + # Create or coerce a protobuf request object. + request = assuredworkloads.AcknowledgeViolationRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge_violation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AssuredWorkloadsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AssuredWorkloadsServiceAsyncClient",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py new file mode 100644 index 000000000000..dbbe19abf163 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py @@ -0,0 +1,1555 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.assuredworkloads_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1.services.assured_workloads_service import pagers +from google.cloud.assuredworkloads_v1.types import assuredworkloads + +from .transports.base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport +from .transports.grpc import AssuredWorkloadsServiceGrpcTransport +from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport +from .transports.rest import AssuredWorkloadsServiceRestTransport + + +class AssuredWorkloadsServiceClientMeta(type): + """Metaclass for the AssuredWorkloadsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] + _transport_registry["grpc"] = AssuredWorkloadsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AssuredWorkloadsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AssuredWorkloadsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AssuredWorkloadsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AssuredWorkloadsServiceClient(metaclass=AssuredWorkloadsServiceClientMeta): + """Service to manage AssuredWorkloads.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "assuredworkloads.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AssuredWorkloadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AssuredWorkloadsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def violation_path( + organization: str, + location: str, + workload: str, + violation: str, + ) -> str: + """Returns a fully-qualified violation string.""" + return "organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}".format( + organization=organization, + location=location, + workload=workload, + violation=violation, + ) + + @staticmethod + def parse_violation_path(path: str) -> Dict[str, str]: + """Parses a violation path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/workloads/(?P.+?)/violations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def workload_path( + organization: str, + location: str, + workload: str, + ) -> str: + """Returns a fully-qualified workload string.""" + return "organizations/{organization}/locations/{location}/workloads/{workload}".format( + organization=organization, + location=location, + workload=workload, + ) + + @staticmethod + def parse_workload_path(path: str) -> Dict[str, str]: + """Parses a workload path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/workloads/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def 
parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AssuredWorkloadsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the assured workloads service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AssuredWorkloadsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ if isinstance(transport, AssuredWorkloadsServiceTransport): + # transport is a AssuredWorkloadsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_workload( + self, + request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, + *, + parent: Optional[str] = None, + workload: Optional[assuredworkloads.Workload] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates Assured Workload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_create_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" + + request = assuredworkloads_v1.CreateWorkloadRequest( + parent="parent_value", + workload=workload, + ) + + # Make the request + operation = client.create_workload(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.CreateWorkloadRequest, dict]): + The request object. Request for creating a workload. + parent (str): + Required. The resource name of the new Workload's + parent. Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workload (google.cloud.assuredworkloads_v1.types.Workload): + Required. Assured Workload to create + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.assuredworkloads_v1.types.Workload` A Workload object for managing highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workload]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.CreateWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.CreateWorkloadRequest): + request = assuredworkloads.CreateWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workload is not None: + request.workload = workload + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_workload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + assuredworkloads.Workload, + metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_workload( + self, + request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, + *, + workload: Optional[assuredworkloads.Workload] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_update_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" + + request = assuredworkloads_v1.UpdateWorkloadRequest( + workload=workload, + ) + + # Make the request + response = client.update_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.UpdateWorkloadRequest, dict]): + The request object. Request for Updating a workload. + workload (google.cloud.assuredworkloads_v1.types.Workload): + Required. The workload to update. The workload's + ``name`` field is used to identify the workload to be + updated. 
Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.Workload: + A Workload object for managing highly + regulated workloads of cloud customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workload, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.UpdateWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.UpdateWorkloadRequest): + request = assuredworkloads.UpdateWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workload is not None: + request.workload = workload + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_workload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workload.name", request.workload.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def restrict_allowed_resources( + self, + request: Optional[ + Union[assuredworkloads.RestrictAllowedResourcesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + r"""Restrict the list of resources allowed in the + Workload environment. The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_restrict_allowed_resources(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.RestrictAllowedResourcesRequest( + name="name_value", + restriction_type="ALLOW_COMPLIANT_RESOURCES", + ) + + # Make the request + response = client.restrict_allowed_resources(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest, dict]): + The request object. Request for restricting list of + available resources in Workload + environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesResponse: + Response for restricting the list of + allowed resources. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.RestrictAllowedResourcesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.RestrictAllowedResourcesRequest): + request = assuredworkloads.RestrictAllowedResourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.restrict_allowed_resources + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_workload( + self, + request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_delete_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.DeleteWorkloadRequest( + name="name_value", + ) + + # Make the request + client.delete_workload(request=request) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.DeleteWorkloadRequest, dict]): + The request object. Request for deleting a Workload. + name (str): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.DeleteWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.DeleteWorkloadRequest): + request = assuredworkloads.DeleteWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_workload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_workload( + self, + request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Gets Assured Workload associated with a CRM Node + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_get_workload(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.GetWorkloadRequest( + name="name_value", + ) + + # Make the request + response = client.get_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.GetWorkloadRequest, dict]): + The request object. Request for fetching a workload. + name (str): + Required. The resource name of the Workload to fetch. + This is the workload's relative path in the API, + formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.Workload: + A Workload object for managing highly + regulated workloads of cloud customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.GetWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.GetWorkloadRequest): + request = assuredworkloads.GetWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workloads( + self, + request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkloadsPager: + r"""Lists Assured Workloads under a CRM Node. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_list_workloads(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.ListWorkloadsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workloads(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest, dict]): + The request object. Request for fetching workloads in an + organization. + parent (str): + Required. Parent Resource to list workloads from. Must + be of the form + ``organizations/{org_id}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListWorkloadsPager: + Response of ListWorkloads endpoint. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.ListWorkloadsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.ListWorkloadsRequest): + request = assuredworkloads.ListWorkloadsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workloads] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkloadsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_violations( + self, + request: Optional[Union[assuredworkloads.ListViolationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViolationsPager: + r"""Lists the Violations in the AssuredWorkload Environment. Callers + may also choose to read across multiple Workloads as per + `AIP-159 `__ by using '-' (the + hyphen or dash character) as a wildcard character instead of + workload-id in the parent. Format + ``organizations/{org_id}/locations/{location}/workloads/-`` + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_list_violations(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.ListViolationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_violations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.ListViolationsRequest, dict]): + The request object. Request for fetching violations in an + organization. + parent (str): + Required. The Workload name. Format + ``organizations/{org_id}/locations/{location}/workloads/{workload}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListViolationsPager: + Response of ListViolations endpoint. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.ListViolationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.ListViolationsRequest): + request = assuredworkloads.ListViolationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_violations] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListViolationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_violation( + self, + request: Optional[Union[assuredworkloads.GetViolationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Violation: + r"""Retrieves Assured Workload Violation based on ID. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_get_violation(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.GetViolationRequest( + name="name_value", + ) + + # Make the request + response = client.get_violation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.GetViolationRequest, dict]): + The request object. Request for fetching a Workload + Violation. + name (str): + Required. The resource name of the + Violation to fetch (ie. Violation.name). + Format: + + organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.Violation: + Workload monitoring Violation. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.GetViolationRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.GetViolationRequest): + request = assuredworkloads.GetViolationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_violation] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def acknowledge_violation( + self, + request: Optional[ + Union[assuredworkloads.AcknowledgeViolationRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.AcknowledgeViolationResponse: + r"""Acknowledges an existing violation. By acknowledging + a violation, users acknowledge the existence of a + compliance violation in their workload and decide to + ignore it due to a valid business justification. + Acknowledgement is a permanent operation and it cannot + be reverted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1 + + def sample_acknowledge_violation(): + # Create a client + client = assuredworkloads_v1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1.AcknowledgeViolationRequest( + name="name_value", + comment="comment_value", + ) + + # Make the request + response = client.acknowledge_violation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1.types.AcknowledgeViolationRequest, dict]): + The request object. Request for acknowledging the + violation Next Id: 4 + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1.types.AcknowledgeViolationResponse: + Response for violation + acknowledgement + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.AcknowledgeViolationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.AcknowledgeViolationRequest): + request = assuredworkloads.AcknowledgeViolationRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge_violation] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "AssuredWorkloadsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AssuredWorkloadsServiceClient",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/pagers.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/pagers.py new file mode 100644 index 000000000000..3b77e9f0ffbb --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.assuredworkloads_v1.types import assuredworkloads + + +class ListWorkloadsPager: + """A pager for iterating through ``list_workloads`` requests. + + This class thinly wraps an initial + :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workloads`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkloads`` requests and continue to iterate + through the ``workloads`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., assuredworkloads.ListWorkloadsResponse], + request: assuredworkloads.ListWorkloadsRequest, + response: assuredworkloads.ListWorkloadsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest): + The initial request object. + response (google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = assuredworkloads.ListWorkloadsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[assuredworkloads.ListWorkloadsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[assuredworkloads.Workload]: + for page in self.pages: + yield from page.workloads + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkloadsAsyncPager: + """A pager for iterating through ``list_workloads`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workloads`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkloads`` requests and continue to iterate + through the ``workloads`` field on the + corresponding responses. + + All the usual :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[assuredworkloads.ListWorkloadsResponse]], + request: assuredworkloads.ListWorkloadsRequest, + response: assuredworkloads.ListWorkloadsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest): + The initial request object. + response (google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = assuredworkloads.ListWorkloadsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[assuredworkloads.ListWorkloadsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[assuredworkloads.Workload]: + async def async_generator(): + async for page in self.pages: + for response in page.workloads: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListViolationsPager: + """A pager for iterating through ``list_violations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``violations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListViolations`` requests and continue to iterate + through the ``violations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., assuredworkloads.ListViolationsResponse], + request: assuredworkloads.ListViolationsRequest, + response: assuredworkloads.ListViolationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.assuredworkloads_v1.types.ListViolationsRequest): + The initial request object. + response (google.cloud.assuredworkloads_v1.types.ListViolationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = assuredworkloads.ListViolationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[assuredworkloads.ListViolationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[assuredworkloads.Violation]: + for page in self.pages: + yield from page.violations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListViolationsAsyncPager: + """A pager for iterating through ``list_violations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``violations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListViolations`` requests and continue to iterate + through the ``violations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[assuredworkloads.ListViolationsResponse]], + request: assuredworkloads.ListViolationsRequest, + response: assuredworkloads.ListViolationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.assuredworkloads_v1.types.ListViolationsRequest): + The initial request object. + response (google.cloud.assuredworkloads_v1.types.ListViolationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = assuredworkloads.ListViolationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[assuredworkloads.ListViolationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[assuredworkloads.Violation]: + async def async_generator(): + async for page in self.pages: + for response in page.violations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/__init__.py new file mode 100644 index 000000000000..077f25e35559 --- /dev/null +++ 
b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AssuredWorkloadsServiceTransport +from .grpc import AssuredWorkloadsServiceGrpcTransport +from .grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport +from .rest import ( + AssuredWorkloadsServiceRestInterceptor, + AssuredWorkloadsServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] +_transport_registry["grpc"] = AssuredWorkloadsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AssuredWorkloadsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AssuredWorkloadsServiceRestTransport + +__all__ = ( + "AssuredWorkloadsServiceTransport", + "AssuredWorkloadsServiceGrpcTransport", + "AssuredWorkloadsServiceGrpcAsyncIOTransport", + "AssuredWorkloadsServiceRestTransport", + "AssuredWorkloadsServiceRestInterceptor", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/base.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/base.py new file mode 100644 index 000000000000..aaba4fed9469 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/base.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1 import gapic_version as package_version +from google.cloud.assuredworkloads_v1.types import assuredworkloads + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AssuredWorkloadsServiceTransport(abc.ABC): + """Abstract transport class for AssuredWorkloadsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "assuredworkloads.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_workload: gapic_v1.method.wrap_method( + self.create_workload, + default_timeout=None, + client_info=client_info, + ), + self.update_workload: gapic_v1.method.wrap_method( + self.update_workload, + default_timeout=None, + client_info=client_info, + ), + self.restrict_allowed_resources: gapic_v1.method.wrap_method( + self.restrict_allowed_resources, + default_timeout=None, + client_info=client_info, + ), + self.delete_workload: gapic_v1.method.wrap_method( + self.delete_workload, + default_timeout=None, + client_info=client_info, + ), + self.get_workload: gapic_v1.method.wrap_method( + self.get_workload, + default_timeout=None, + client_info=client_info, + ), + self.list_workloads: gapic_v1.method.wrap_method( + self.list_workloads, + default_timeout=None, + client_info=client_info, + ), + self.list_violations: gapic_v1.method.wrap_method( + self.list_violations, + default_timeout=None, + client_info=client_info, + ), + self.get_violation: gapic_v1.method.wrap_method( + self.get_violation, + default_timeout=None, + client_info=client_info, + ), + self.acknowledge_violation: gapic_v1.method.wrap_method( + self.acknowledge_violation, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_workload( + self, + ) -> Callable[ + [assuredworkloads.CreateWorkloadRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workload( + self, + ) -> Callable[ + [assuredworkloads.UpdateWorkloadRequest], + Union[assuredworkloads.Workload, Awaitable[assuredworkloads.Workload]], + ]: + raise NotImplementedError() + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + Union[ + assuredworkloads.RestrictAllowedResourcesResponse, + Awaitable[assuredworkloads.RestrictAllowedResourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_workload( + self, + ) -> Callable[ + [assuredworkloads.DeleteWorkloadRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_workload( + self, + ) -> Callable[ + [assuredworkloads.GetWorkloadRequest], + Union[assuredworkloads.Workload, Awaitable[assuredworkloads.Workload]], + ]: + raise NotImplementedError() + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], + Union[ + assuredworkloads.ListWorkloadsResponse, + Awaitable[assuredworkloads.ListWorkloadsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_violations( + self, + ) -> Callable[ + [assuredworkloads.ListViolationsRequest], + Union[ + assuredworkloads.ListViolationsResponse, + Awaitable[assuredworkloads.ListViolationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_violation( + self, + ) -> Callable[ + [assuredworkloads.GetViolationRequest], + Union[assuredworkloads.Violation, Awaitable[assuredworkloads.Violation]], + ]: + raise 
NotImplementedError() + + @property + def acknowledge_violation( + self, + ) -> Callable[ + [assuredworkloads.AcknowledgeViolationRequest], + Union[ + assuredworkloads.AcknowledgeViolationResponse, + Awaitable[assuredworkloads.AcknowledgeViolationResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AssuredWorkloadsServiceTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc.py new file mode 100644 index 000000000000..39aae23f9a21 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc.py @@ -0,0 +1,559 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.assuredworkloads_v1.types import assuredworkloads + +from .base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport + + +class AssuredWorkloadsServiceGrpcTransport(AssuredWorkloadsServiceTransport): + """gRPC backend transport for AssuredWorkloadsService. + + Service to manage AssuredWorkloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_workload( + self, + ) -> Callable[[assuredworkloads.CreateWorkloadRequest], operations_pb2.Operation]: + r"""Return a callable for the create workload method over gRPC. 
+ + Creates Assured Workload. + + Returns: + Callable[[~.CreateWorkloadRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workload" not in self._stubs: + self._stubs["create_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/CreateWorkload", + request_serializer=assuredworkloads.CreateWorkloadRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workload"] + + @property + def update_workload( + self, + ) -> Callable[[assuredworkloads.UpdateWorkloadRequest], assuredworkloads.Workload]: + r"""Return a callable for the update workload method over gRPC. + + Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + Returns: + Callable[[~.UpdateWorkloadRequest], + ~.Workload]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_workload" not in self._stubs: + self._stubs["update_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/UpdateWorkload", + request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["update_workload"] + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + assuredworkloads.RestrictAllowedResourcesResponse, + ]: + r"""Return a callable for the restrict allowed resources method over gRPC. + + Restrict the list of resources allowed in the + Workload environment. The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + Returns: + Callable[[~.RestrictAllowedResourcesRequest], + ~.RestrictAllowedResourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restrict_allowed_resources" not in self._stubs: + self._stubs["restrict_allowed_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/RestrictAllowedResources", + request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize, + response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize, + ) + return self._stubs["restrict_allowed_resources"] + + @property + def delete_workload( + self, + ) -> Callable[[assuredworkloads.DeleteWorkloadRequest], empty_pb2.Empty]: + r"""Return a callable for the delete workload method over gRPC. + + Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. + + Returns: + Callable[[~.DeleteWorkloadRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workload" not in self._stubs: + self._stubs["delete_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/DeleteWorkload", + request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_workload"] + + @property + def get_workload( + self, + ) -> Callable[[assuredworkloads.GetWorkloadRequest], assuredworkloads.Workload]: + r"""Return a callable for the get workload method over gRPC. + + Gets Assured Workload associated with a CRM Node + + Returns: + Callable[[~.GetWorkloadRequest], + ~.Workload]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workload" not in self._stubs: + self._stubs["get_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetWorkload", + request_serializer=assuredworkloads.GetWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["get_workload"] + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], assuredworkloads.ListWorkloadsResponse + ]: + r"""Return a callable for the list workloads method over gRPC. + + Lists Assured Workloads under a CRM Node. + + Returns: + Callable[[~.ListWorkloadsRequest], + ~.ListWorkloadsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workloads" not in self._stubs: + self._stubs["list_workloads"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListWorkloads", + request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, + response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, + ) + return self._stubs["list_workloads"] + + @property + def list_violations( + self, + ) -> Callable[ + [assuredworkloads.ListViolationsRequest], + assuredworkloads.ListViolationsResponse, + ]: + r"""Return a callable for the list violations method over gRPC. + + Lists the Violations in the AssuredWorkload Environment. Callers + may also choose to read across multiple Workloads as per + `AIP-159 `__ by using '-' (the + hyphen or dash character) as a wildcard character instead of + workload-id in the parent. 
Format + ``organizations/{org_id}/locations/{location}/workloads/-`` + + Returns: + Callable[[~.ListViolationsRequest], + ~.ListViolationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_violations" not in self._stubs: + self._stubs["list_violations"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListViolations", + request_serializer=assuredworkloads.ListViolationsRequest.serialize, + response_deserializer=assuredworkloads.ListViolationsResponse.deserialize, + ) + return self._stubs["list_violations"] + + @property + def get_violation( + self, + ) -> Callable[[assuredworkloads.GetViolationRequest], assuredworkloads.Violation]: + r"""Return a callable for the get violation method over gRPC. + + Retrieves Assured Workload Violation based on ID. + + Returns: + Callable[[~.GetViolationRequest], + ~.Violation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_violation" not in self._stubs: + self._stubs["get_violation"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetViolation", + request_serializer=assuredworkloads.GetViolationRequest.serialize, + response_deserializer=assuredworkloads.Violation.deserialize, + ) + return self._stubs["get_violation"] + + @property + def acknowledge_violation( + self, + ) -> Callable[ + [assuredworkloads.AcknowledgeViolationRequest], + assuredworkloads.AcknowledgeViolationResponse, + ]: + r"""Return a callable for the acknowledge violation method over gRPC. 
+ + Acknowledges an existing violation. By acknowledging + a violation, users acknowledge the existence of a + compliance violation in their workload and decide to + ignore it due to a valid business justification. + Acknowledgement is a permanent operation and it cannot + be reverted. + + Returns: + Callable[[~.AcknowledgeViolationRequest], + ~.AcknowledgeViolationResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge_violation" not in self._stubs: + self._stubs["acknowledge_violation"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/AcknowledgeViolation", + request_serializer=assuredworkloads.AcknowledgeViolationRequest.serialize, + response_deserializer=assuredworkloads.AcknowledgeViolationResponse.deserialize, + ) + return self._stubs["acknowledge_violation"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AssuredWorkloadsServiceGrpcTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc_asyncio.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..20f692f55f58 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc_asyncio.py @@ -0,0 +1,569 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.assuredworkloads_v1.types import assuredworkloads + +from .base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport +from .grpc import AssuredWorkloadsServiceGrpcTransport + + +class AssuredWorkloadsServiceGrpcAsyncIOTransport(AssuredWorkloadsServiceTransport): + """gRPC AsyncIO backend transport for AssuredWorkloadsService. + + Service to manage AssuredWorkloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_workload( + self, + ) -> Callable[ + [assuredworkloads.CreateWorkloadRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create workload method over gRPC. + + Creates Assured Workload. + + Returns: + Callable[[~.CreateWorkloadRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workload" not in self._stubs: + self._stubs["create_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/CreateWorkload", + request_serializer=assuredworkloads.CreateWorkloadRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workload"] + + @property + def update_workload( + self, + ) -> Callable[ + [assuredworkloads.UpdateWorkloadRequest], Awaitable[assuredworkloads.Workload] + ]: + r"""Return a callable for the update workload method over gRPC. + + Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + Returns: + Callable[[~.UpdateWorkloadRequest], + Awaitable[~.Workload]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workload" not in self._stubs: + self._stubs["update_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/UpdateWorkload", + request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["update_workload"] + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + Awaitable[assuredworkloads.RestrictAllowedResourcesResponse], + ]: + r"""Return a callable for the restrict allowed resources method over gRPC. + + Restrict the list of resources allowed in the + Workload environment. 
The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + Returns: + Callable[[~.RestrictAllowedResourcesRequest], + Awaitable[~.RestrictAllowedResourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restrict_allowed_resources" not in self._stubs: + self._stubs["restrict_allowed_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/RestrictAllowedResources", + request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize, + response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize, + ) + return self._stubs["restrict_allowed_resources"] + + @property + def delete_workload( + self, + ) -> Callable[[assuredworkloads.DeleteWorkloadRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete workload method over gRPC. + + Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. + + Returns: + Callable[[~.DeleteWorkloadRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_workload" not in self._stubs: + self._stubs["delete_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/DeleteWorkload", + request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_workload"] + + @property + def get_workload( + self, + ) -> Callable[ + [assuredworkloads.GetWorkloadRequest], Awaitable[assuredworkloads.Workload] + ]: + r"""Return a callable for the get workload method over gRPC. + + Gets Assured Workload associated with a CRM Node + + Returns: + Callable[[~.GetWorkloadRequest], + Awaitable[~.Workload]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workload" not in self._stubs: + self._stubs["get_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetWorkload", + request_serializer=assuredworkloads.GetWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["get_workload"] + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], + Awaitable[assuredworkloads.ListWorkloadsResponse], + ]: + r"""Return a callable for the list workloads method over gRPC. + + Lists Assured Workloads under a CRM Node. + + Returns: + Callable[[~.ListWorkloadsRequest], + Awaitable[~.ListWorkloadsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_workloads" not in self._stubs: + self._stubs["list_workloads"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListWorkloads", + request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, + response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, + ) + return self._stubs["list_workloads"] + + @property + def list_violations( + self, + ) -> Callable[ + [assuredworkloads.ListViolationsRequest], + Awaitable[assuredworkloads.ListViolationsResponse], + ]: + r"""Return a callable for the list violations method over gRPC. + + Lists the Violations in the AssuredWorkload Environment. Callers + may also choose to read across multiple Workloads as per + `AIP-159 `__ by using '-' (the + hyphen or dash character) as a wildcard character instead of + workload-id in the parent. Format + ``organizations/{org_id}/locations/{location}/workloads/-`` + + Returns: + Callable[[~.ListViolationsRequest], + Awaitable[~.ListViolationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_violations" not in self._stubs: + self._stubs["list_violations"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListViolations", + request_serializer=assuredworkloads.ListViolationsRequest.serialize, + response_deserializer=assuredworkloads.ListViolationsResponse.deserialize, + ) + return self._stubs["list_violations"] + + @property + def get_violation( + self, + ) -> Callable[ + [assuredworkloads.GetViolationRequest], Awaitable[assuredworkloads.Violation] + ]: + r"""Return a callable for the get violation method over gRPC. + + Retrieves Assured Workload Violation based on ID. 
+ + Returns: + Callable[[~.GetViolationRequest], + Awaitable[~.Violation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_violation" not in self._stubs: + self._stubs["get_violation"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetViolation", + request_serializer=assuredworkloads.GetViolationRequest.serialize, + response_deserializer=assuredworkloads.Violation.deserialize, + ) + return self._stubs["get_violation"] + + @property + def acknowledge_violation( + self, + ) -> Callable[ + [assuredworkloads.AcknowledgeViolationRequest], + Awaitable[assuredworkloads.AcknowledgeViolationResponse], + ]: + r"""Return a callable for the acknowledge violation method over gRPC. + + Acknowledges an existing violation. By acknowledging + a violation, users acknowledge the existence of a + compliance violation in their workload and decide to + ignore it due to a valid business justification. + Acknowledgement is a permanent operation and it cannot + be reverted. + + Returns: + Callable[[~.AcknowledgeViolationRequest], + Awaitable[~.AcknowledgeViolationResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "acknowledge_violation" not in self._stubs: + self._stubs["acknowledge_violation"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/AcknowledgeViolation", + request_serializer=assuredworkloads.AcknowledgeViolationRequest.serialize, + response_deserializer=assuredworkloads.AcknowledgeViolationResponse.deserialize, + ) + return self._stubs["acknowledge_violation"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("AssuredWorkloadsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py new file mode 100644 index 000000000000..1f227bab17bc --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py @@ -0,0 +1,1281 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1.types import assuredworkloads + +from .base import AssuredWorkloadsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AssuredWorkloadsServiceRestInterceptor: + """Interceptor for AssuredWorkloadsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AssuredWorkloadsServiceRestTransport. + + .. 
code-block:: python + class MyCustomAssuredWorkloadsServiceInterceptor(AssuredWorkloadsServiceRestInterceptor): + def pre_acknowledge_violation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_acknowledge_violation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workload(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_violation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_violation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workload(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_violations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_violations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workloads(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workloads(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restrict_allowed_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restrict_allowed_resources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workload(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workload(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AssuredWorkloadsServiceRestTransport(interceptor=MyCustomAssuredWorkloadsServiceInterceptor()) + client = AssuredWorkloadsServiceClient(transport=transport) + + + """ + + def pre_create_workload( + self, + request: assuredworkloads.CreateWorkloadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.CreateWorkloadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workload + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_create_workload( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workload + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_delete_workload( + self, + request: assuredworkloads.DeleteWorkloadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.DeleteWorkloadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workload + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def pre_get_workload( + self, + request: assuredworkloads.GetWorkloadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.GetWorkloadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_workload + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. 
+ """ + return request, metadata + + def post_get_workload( + self, response: assuredworkloads.Workload + ) -> assuredworkloads.Workload: + """Post-rpc interceptor for get_workload + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_list_workloads( + self, + request: assuredworkloads.ListWorkloadsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.ListWorkloadsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_workloads + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_list_workloads( + self, response: assuredworkloads.ListWorkloadsResponse + ) -> assuredworkloads.ListWorkloadsResponse: + """Post-rpc interceptor for list_workloads + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_restrict_allowed_resources( + self, + request: assuredworkloads.RestrictAllowedResourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + assuredworkloads.RestrictAllowedResourcesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for restrict_allowed_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_restrict_allowed_resources( + self, response: assuredworkloads.RestrictAllowedResourcesResponse + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + """Post-rpc interceptor for restrict_allowed_resources + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_workload( + self, + request: assuredworkloads.UpdateWorkloadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.UpdateWorkloadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_workload + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_update_workload( + self, response: assuredworkloads.Workload + ) -> assuredworkloads.Workload: + """Post-rpc interceptor for update_workload + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AssuredWorkloadsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AssuredWorkloadsServiceRestInterceptor + + +class AssuredWorkloadsServiceRestTransport(AssuredWorkloadsServiceTransport): + """REST backend transport for AssuredWorkloadsService. + + Service to manage AssuredWorkloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AssuredWorkloadsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AssuredWorkloadsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AcknowledgeViolation(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("AcknowledgeViolation") + + def __call__( + self, + request: assuredworkloads.AcknowledgeViolationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.AcknowledgeViolationResponse: + raise NotImplementedError( + "Method AcknowledgeViolation is not available over REST transport" + ) + + class _CreateWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("CreateWorkload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.CreateWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workload method over HTTP. 
+ + Args: + request (~.assuredworkloads.CreateWorkloadRequest): + The request object. Request for creating a workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=organizations/*/locations/*}/workloads", + "body": "workload", + }, + ] + request, metadata = self._interceptor.pre_create_workload(request, metadata) + pb_request = assuredworkloads.CreateWorkloadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workload(resp) + return resp + + class _DeleteWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("DeleteWorkload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.DeleteWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete workload method over HTTP. + + Args: + request (~.assuredworkloads.DeleteWorkloadRequest): + The request object. Request for deleting a Workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/workloads/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workload(request, metadata) + pb_request = assuredworkloads.DeleteWorkloadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetViolation(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("GetViolation") + + def __call__( + self, + request: assuredworkloads.GetViolationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Violation: + raise NotImplementedError( + "Method GetViolation is not available over REST transport" + ) + + class _GetWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("GetWorkload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.GetWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Call the get workload method over HTTP. + + Args: + request (~.assuredworkloads.GetWorkloadRequest): + The request object. Request for fetching a workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.assuredworkloads.Workload: + A Workload object for managing highly + regulated workloads of cloud customers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/workloads/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workload(request, metadata) + pb_request = assuredworkloads.GetWorkloadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = assuredworkloads.Workload() + pb_resp = assuredworkloads.Workload.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workload(resp) + return resp + + class _ListViolations(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("ListViolations") + + def __call__( + self, + request: assuredworkloads.ListViolationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.ListViolationsResponse: + raise NotImplementedError( + "Method ListViolations is not available over REST transport" + ) + + class _ListWorkloads(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("ListWorkloads") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.ListWorkloadsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.ListWorkloadsResponse: + r"""Call the list workloads method over HTTP. + + Args: + request (~.assuredworkloads.ListWorkloadsRequest): + The request object. Request for fetching workloads in an + organization. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.assuredworkloads.ListWorkloadsResponse: + Response of ListWorkloads endpoint. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/workloads", + }, + ] + request, metadata = self._interceptor.pre_list_workloads(request, metadata) + pb_request = assuredworkloads.ListWorkloadsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = assuredworkloads.ListWorkloadsResponse() + pb_resp = assuredworkloads.ListWorkloadsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workloads(resp) + return resp + + class _RestrictAllowedResources(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("RestrictAllowedResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.RestrictAllowedResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + r"""Call the restrict allowed + resources method over HTTP. + + Args: + request (~.assuredworkloads.RestrictAllowedResourcesRequest): + The request object. Request for restricting list of + available resources in Workload + environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.assuredworkloads.RestrictAllowedResourcesResponse: + Response for restricting the list of + allowed resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/workloads/*}:restrictAllowedResources", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restrict_allowed_resources( + request, metadata + ) + pb_request = assuredworkloads.RestrictAllowedResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = assuredworkloads.RestrictAllowedResourcesResponse() + pb_resp = assuredworkloads.RestrictAllowedResourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restrict_allowed_resources(resp) + return resp + + class _UpdateWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("UpdateWorkload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.UpdateWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Call the update workload method over HTTP. + + Args: + request (~.assuredworkloads.UpdateWorkloadRequest): + The request object. Request for Updating a workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.assuredworkloads.Workload: + A Workload object for managing highly + regulated workloads of cloud customers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{workload.name=organizations/*/locations/*/workloads/*}", + "body": "workload", + }, + ] + request, metadata = self._interceptor.pre_update_workload(request, metadata) + pb_request = assuredworkloads.UpdateWorkloadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = assuredworkloads.Workload() + pb_resp = assuredworkloads.Workload.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workload(resp) + return resp + + @property + def acknowledge_violation( + self, + ) -> Callable[ + [assuredworkloads.AcknowledgeViolationRequest], + assuredworkloads.AcknowledgeViolationResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AcknowledgeViolation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_workload( + self, + ) -> Callable[[assuredworkloads.CreateWorkloadRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workload( + self, + ) -> Callable[[assuredworkloads.DeleteWorkloadRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_violation( + self, + ) -> Callable[[assuredworkloads.GetViolationRequest], assuredworkloads.Violation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetViolation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workload( + self, + ) -> Callable[[assuredworkloads.GetWorkloadRequest], assuredworkloads.Workload]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_violations( + self, + ) -> Callable[ + [assuredworkloads.ListViolationsRequest], + assuredworkloads.ListViolationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListViolations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], assuredworkloads.ListWorkloadsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkloads(self._session, self._host, self._interceptor) # type: ignore + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + assuredworkloads.RestrictAllowedResourcesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestrictAllowedResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workload( + self, + ) -> Callable[[assuredworkloads.UpdateWorkloadRequest], assuredworkloads.Workload]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AssuredWorkloadsServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AssuredWorkloadsServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AssuredWorkloadsServiceRestTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/types/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/types/__init__.py new file mode 100644 index 000000000000..fa63dad6a875 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/types/__init__.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .assuredworkloads import ( + AcknowledgeViolationRequest, + AcknowledgeViolationResponse, + CreateWorkloadOperationMetadata, + CreateWorkloadRequest, + DeleteWorkloadRequest, + GetViolationRequest, + GetWorkloadRequest, + ListViolationsRequest, + ListViolationsResponse, + ListWorkloadsRequest, + ListWorkloadsResponse, + RestrictAllowedResourcesRequest, + RestrictAllowedResourcesResponse, + TimeWindow, + UpdateWorkloadRequest, + Violation, + Workload, +) + +__all__ = ( + "AcknowledgeViolationRequest", + "AcknowledgeViolationResponse", + "CreateWorkloadOperationMetadata", + "CreateWorkloadRequest", + "DeleteWorkloadRequest", + "GetViolationRequest", + "GetWorkloadRequest", + "ListViolationsRequest", + "ListViolationsResponse", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "TimeWindow", + "UpdateWorkloadRequest", + "Violation", + "Workload", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/types/assuredworkloads.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/types/assuredworkloads.py new file mode 100644 index 000000000000..deb8e9c08e9f --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/types/assuredworkloads.py @@ -0,0 +1,1169 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.assuredworkloads.v1", + manifest={ + "CreateWorkloadRequest", + "UpdateWorkloadRequest", + "DeleteWorkloadRequest", + "GetWorkloadRequest", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "Workload", + "CreateWorkloadOperationMetadata", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "AcknowledgeViolationRequest", + "AcknowledgeViolationResponse", + "TimeWindow", + "ListViolationsRequest", + "ListViolationsResponse", + "GetViolationRequest", + "Violation", + }, +) + + +class CreateWorkloadRequest(proto.Message): + r"""Request for creating a workload. + + Attributes: + parent (str): + Required. The resource name of the new Workload's parent. + Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + workload (google.cloud.assuredworkloads_v1.types.Workload): + Required. Assured Workload to create + external_id (str): + Optional. A identifier associated with the + workload and underlying projects which allows + for the break down of billing costs for a + workload. The value provided for the identifier + will add a label to the workload and contained + projects with the identifier as the value. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + workload: "Workload" = proto.Field( + proto.MESSAGE, + number=2, + message="Workload", + ) + external_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateWorkloadRequest(proto.Message): + r"""Request for Updating a workload. + + Attributes: + workload (google.cloud.assuredworkloads_v1.types.Workload): + Required. The workload to update. The workload's ``name`` + field is used to identify the workload to be updated. + Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. + """ + + workload: "Workload" = proto.Field( + proto.MESSAGE, + number=1, + message="Workload", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteWorkloadRequest(proto.Message): + r"""Request for deleting a Workload. + + Attributes: + name (str): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + etag (str): + Optional. The etag of the workload. + If this is provided, it must match the server's + etag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetWorkloadRequest(proto.Message): + r"""Request for fetching a workload. + + Attributes: + name (str): + Required. The resource name of the Workload to fetch. This + is the workload's relative path in the API, formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkloadsRequest(proto.Message): + r"""Request for fetching workloads in an organization. + + Attributes: + parent (str): + Required. Parent Resource to list workloads from. Must be of + the form ``organizations/{org_id}/locations/{location}``. + page_size (int): + Page size. + page_token (str): + Page token returned from previous request. + Page token contains context from previous + request. Page token needs to be passed in the + second and following requests. + filter (str): + A custom filter for filtering by properties + of a workload. At this time, only filtering by + labels is supported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListWorkloadsResponse(proto.Message): + r"""Response of ListWorkloads endpoint. + + Attributes: + workloads (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload]): + List of Workloads under a given parent. + next_page_token (str): + The next page token. Return empty if reached + the last page. + """ + + @property + def raw_page(self): + return self + + workloads: MutableSequence["Workload"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workload", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Workload(proto.Message): + r"""A Workload object for managing highly regulated workloads of + cloud customers. + + Attributes: + name (str): + Optional. The resource name of the workload. + Format: + + organizations/{organization}/locations/{location}/workloads/{workload} + + Read-only. + display_name (str): + Required. The user-assigned display name of + the Workload. When present it must be between 4 + to 30 characters. 
Allowed characters are: + lowercase and uppercase letters, numbers, + hyphen, and spaces. + + Example: My Workload + resources (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo]): + Output only. The resources associated with + this workload. These resources will be created + when creating the workload. If any of the + projects already exist, the workload creation + will fail. Always read only. + compliance_regime (google.cloud.assuredworkloads_v1.types.Workload.ComplianceRegime): + Required. Immutable. Compliance Regime + associated with this workload. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Immutable. The Workload creation + timestamp. + billing_account (str): + Optional. The billing account used for the resources which + are direct children of workload. This billing account is + initially associated with the resources created as part of + Workload creation. After the initial creation of these + resources, the customer can change the assigned billing + account. The resource name has the form + ``billingAccounts/{billing_account_id}``. For example, + ``billingAccounts/012345-567890-ABCDEF``. + etag (str): + Optional. ETag of the workload, it is + calculated on the basis of the Workload + contents. It will be used in Update & Delete + operations. + labels (MutableMapping[str, str]): + Optional. Labels applied to the workload. + provisioned_resources_parent (str): + Input only. The parent resource for the resources managed by + this Assured Workload. May be either empty or a folder + resource which is a child of the Workload parent. If not + specified all resources are created under the parent + organization. Format: folders/{folder_id} + kms_settings (google.cloud.assuredworkloads_v1.types.Workload.KMSSettings): + Input only. Settings used to create a CMEK crypto key. When + set, a project with a KMS CMEK key is provisioned. This + field is deprecated as of Feb 28, 2022. 
In order to create a + Keyring, callers should specify, ENCRYPTION_KEYS_PROJECT or + KEYRING in ResourceSettings.resource_type field. + resource_settings (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.ResourceSettings]): + Input only. Resource properties that are used + to customize workload resources. These + properties (such as custom project id) will be + used to create workload resources if possible. + This field is optional. + kaj_enrollment_state (google.cloud.assuredworkloads_v1.types.Workload.KajEnrollmentState): + Output only. Represents the KAJ enrollment + state of the given workload. + enable_sovereign_controls (bool): + Optional. Indicates the sovereignty status of + the given workload. Currently meant to be used + by Europe/Canada customers. + saa_enrollment_response (google.cloud.assuredworkloads_v1.types.Workload.SaaEnrollmentResponse): + Output only. Represents the SAA enrollment + response of the given workload. SAA enrollment + response is queried during GetWorkload call. In + failure cases, user friendly error message is + shown in SAA details page. + compliant_but_disallowed_services (MutableSequence[str]): + Output only. Urls for services which are + compliant for this Assured Workload, but which + are currently disallowed by the + ResourceUsageRestriction org policy. Invoke + RestrictAllowedResources endpoint to allow your + project developers to use these services in + their environment.". + partner (google.cloud.assuredworkloads_v1.types.Workload.Partner): + Optional. Compliance Regime associated with + this workload. + """ + + class ComplianceRegime(proto.Enum): + r"""Supported Compliance Regimes. + + Values: + COMPLIANCE_REGIME_UNSPECIFIED (0): + Unknown compliance regime. + IL4 (1): + Information protection as per DoD IL4 + requirements. + CJIS (2): + Criminal Justice Information Services (CJIS) + Security policies. 
+ FEDRAMP_HIGH (3): + FedRAMP High data protection controls + FEDRAMP_MODERATE (4): + FedRAMP Moderate data protection controls + US_REGIONAL_ACCESS (5): + Assured Workloads For US Regions data + protection controls + HIPAA (6): + Health Insurance Portability and + Accountability Act controls + HITRUST (7): + Health Information Trust Alliance controls + EU_REGIONS_AND_SUPPORT (8): + Assured Workloads For EU Regions and Support + controls + CA_REGIONS_AND_SUPPORT (9): + Assured Workloads For Canada Regions and + Support controls + ITAR (10): + International Traffic in Arms Regulations + AU_REGIONS_AND_US_SUPPORT (11): + Assured Workloads for Australia Regions and + Support controls Available for public preview + consumption. Don't create production workloads. + ASSURED_WORKLOADS_FOR_PARTNERS (12): + Assured Workloads for Partners + """ + COMPLIANCE_REGIME_UNSPECIFIED = 0 + IL4 = 1 + CJIS = 2 + FEDRAMP_HIGH = 3 + FEDRAMP_MODERATE = 4 + US_REGIONAL_ACCESS = 5 + HIPAA = 6 + HITRUST = 7 + EU_REGIONS_AND_SUPPORT = 8 + CA_REGIONS_AND_SUPPORT = 9 + ITAR = 10 + AU_REGIONS_AND_US_SUPPORT = 11 + ASSURED_WORKLOADS_FOR_PARTNERS = 12 + + class KajEnrollmentState(proto.Enum): + r"""Key Access Justifications(KAJ) Enrollment State. + + Values: + KAJ_ENROLLMENT_STATE_UNSPECIFIED (0): + Default State for KAJ Enrollment. + KAJ_ENROLLMENT_STATE_PENDING (1): + Pending State for KAJ Enrollment. + KAJ_ENROLLMENT_STATE_COMPLETE (2): + Complete State for KAJ Enrollment. + """ + KAJ_ENROLLMENT_STATE_UNSPECIFIED = 0 + KAJ_ENROLLMENT_STATE_PENDING = 1 + KAJ_ENROLLMENT_STATE_COMPLETE = 2 + + class Partner(proto.Enum): + r"""Supported Assured Workloads Partners. + + Values: + PARTNER_UNSPECIFIED (0): + Unknown partner regime/controls. + LOCAL_CONTROLS_BY_S3NS (1): + S3NS regime/controls. + """ + PARTNER_UNSPECIFIED = 0 + LOCAL_CONTROLS_BY_S3NS = 1 + + class ResourceInfo(proto.Message): + r"""Represent the resources that are children of this Workload. 
+ + Attributes: + resource_id (int): + Resource identifier. For a project this represents + project_number. + resource_type (google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo.ResourceType): + Indicates the type of resource. + """ + + class ResourceType(proto.Enum): + r"""The type of resource. + + Values: + RESOURCE_TYPE_UNSPECIFIED (0): + Unknown resource type. + CONSUMER_PROJECT (1): + Consumer project. AssuredWorkloads Projects are no longer + supported. This field will be ignored only in CreateWorkload + requests. ListWorkloads and GetWorkload will continue to + provide projects information. Use CONSUMER_FOLDER instead. + CONSUMER_FOLDER (4): + Consumer Folder. + ENCRYPTION_KEYS_PROJECT (2): + Consumer project containing encryption keys. + KEYRING (3): + Keyring resource that hosts encryption keys. + """ + RESOURCE_TYPE_UNSPECIFIED = 0 + CONSUMER_PROJECT = 1 + CONSUMER_FOLDER = 4 + ENCRYPTION_KEYS_PROJECT = 2 + KEYRING = 3 + + resource_id: int = proto.Field( + proto.INT64, + number=1, + ) + resource_type: "Workload.ResourceInfo.ResourceType" = proto.Field( + proto.ENUM, + number=2, + enum="Workload.ResourceInfo.ResourceType", + ) + + class KMSSettings(proto.Message): + r"""Settings specific to the Key Management Service. This message is + deprecated. In order to create a Keyring, callers should specify, + ENCRYPTION_KEYS_PROJECT or KEYRING in ResourceSettings.resource_type + field. + + Attributes: + next_rotation_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Input only. Immutable. The time at + which the Key Management Service will + automatically create a new version of the crypto + key and mark it as the primary. + rotation_period (google.protobuf.duration_pb2.Duration): + Required. Input only. Immutable. [next_rotation_time] will + be advanced by this period when the Key Management Service + automatically rotates a key. Must be at least 24 hours and + at most 876,000 hours. 
+ """ + + next_rotation_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + rotation_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + class ResourceSettings(proto.Message): + r"""Represent the custom settings for the resources to be + created. + + Attributes: + resource_id (str): + Resource identifier. For a project this represents + project_id. If the project is already taken, the workload + creation will fail. For KeyRing, this represents the + keyring_id. For a folder, don't set this value as folder_id + is assigned by Google. + resource_type (google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo.ResourceType): + Indicates the type of resource. This field should be + specified to correspond the id to the right resource type + (CONSUMER_FOLDER or ENCRYPTION_KEYS_PROJECT) + display_name (str): + User-assigned resource display name. + If not empty it will be used to create a + resource with the specified name. + """ + + resource_id: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: "Workload.ResourceInfo.ResourceType" = proto.Field( + proto.ENUM, + number=2, + enum="Workload.ResourceInfo.ResourceType", + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + + class SaaEnrollmentResponse(proto.Message): + r"""Signed Access Approvals (SAA) enrollment response. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + setup_status (google.cloud.assuredworkloads_v1.types.Workload.SaaEnrollmentResponse.SetupState): + Indicates SAA enrollment status of a given + workload. + + This field is a member of `oneof`_ ``_setup_status``. + setup_errors (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.SaaEnrollmentResponse.SetupError]): + Indicates SAA enrollment setup error if any. 
+ """ + + class SetupState(proto.Enum): + r"""Setup state of SAA enrollment. + + Values: + SETUP_STATE_UNSPECIFIED (0): + Unspecified. + STATUS_PENDING (1): + SAA enrollment pending. + STATUS_COMPLETE (2): + SAA enrollment comopleted. + """ + SETUP_STATE_UNSPECIFIED = 0 + STATUS_PENDING = 1 + STATUS_COMPLETE = 2 + + class SetupError(proto.Enum): + r"""Setup error of SAA enrollment. + + Values: + SETUP_ERROR_UNSPECIFIED (0): + Unspecified. + ERROR_INVALID_BASE_SETUP (1): + Invalid states for all customers, to be + redirected to AA UI for additional details. + ERROR_MISSING_EXTERNAL_SIGNING_KEY (2): + Returned when there is not an EKM key + configured. + ERROR_NOT_ALL_SERVICES_ENROLLED (3): + Returned when there are no enrolled services + or the customer is enrolled in CAA only for a + subset of services. + ERROR_SETUP_CHECK_FAILED (4): + Returned when exception was encountered + during evaluation of other criteria. + """ + SETUP_ERROR_UNSPECIFIED = 0 + ERROR_INVALID_BASE_SETUP = 1 + ERROR_MISSING_EXTERNAL_SIGNING_KEY = 2 + ERROR_NOT_ALL_SERVICES_ENROLLED = 3 + ERROR_SETUP_CHECK_FAILED = 4 + + setup_status: "Workload.SaaEnrollmentResponse.SetupState" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="Workload.SaaEnrollmentResponse.SetupState", + ) + setup_errors: MutableSequence[ + "Workload.SaaEnrollmentResponse.SetupError" + ] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Workload.SaaEnrollmentResponse.SetupError", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + resources: MutableSequence[ResourceInfo] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=ResourceInfo, + ) + compliance_regime: ComplianceRegime = proto.Field( + proto.ENUM, + number=4, + enum=ComplianceRegime, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + billing_account: str = proto.Field( + 
proto.STRING, + number=6, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + provisioned_resources_parent: str = proto.Field( + proto.STRING, + number=13, + ) + kms_settings: KMSSettings = proto.Field( + proto.MESSAGE, + number=14, + message=KMSSettings, + ) + resource_settings: MutableSequence[ResourceSettings] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message=ResourceSettings, + ) + kaj_enrollment_state: KajEnrollmentState = proto.Field( + proto.ENUM, + number=17, + enum=KajEnrollmentState, + ) + enable_sovereign_controls: bool = proto.Field( + proto.BOOL, + number=18, + ) + saa_enrollment_response: SaaEnrollmentResponse = proto.Field( + proto.MESSAGE, + number=20, + message=SaaEnrollmentResponse, + ) + compliant_but_disallowed_services: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=24, + ) + partner: Partner = proto.Field( + proto.ENUM, + number=25, + enum=Partner, + ) + + +class CreateWorkloadOperationMetadata(proto.Message): + r"""Operation metadata to give request details of CreateWorkload. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time when the operation was + created. + display_name (str): + Optional. The display name of the workload. + parent (str): + Optional. The parent of the workload. + compliance_regime (google.cloud.assuredworkloads_v1.types.Workload.ComplianceRegime): + Optional. Compliance controls that should be + applied to the resources managed by the + workload. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + parent: str = proto.Field( + proto.STRING, + number=3, + ) + compliance_regime: "Workload.ComplianceRegime" = proto.Field( + proto.ENUM, + number=4, + enum="Workload.ComplianceRegime", + ) + + +class RestrictAllowedResourcesRequest(proto.Message): + r"""Request for restricting list of available resources in + Workload environment. + + Attributes: + name (str): + Required. The resource name of the Workload. This is the + workloads's relative path in the API, formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + restriction_type (google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest.RestrictionType): + Required. The type of restriction for using + gcp products in the Workload environment. + """ + + class RestrictionType(proto.Enum): + r"""The type of restriction. + + Values: + RESTRICTION_TYPE_UNSPECIFIED (0): + Unknown restriction type. + ALLOW_ALL_GCP_RESOURCES (1): + Allow the use all of all gcp products, + irrespective of the compliance posture. This + effectively removes gcp.restrictServiceUsage + OrgPolicy on the AssuredWorkloads Folder. + ALLOW_COMPLIANT_RESOURCES (2): + Based on Workload's compliance regime, + allowed list changes. See - + https://cloud.google.com/assured-workloads/docs/supported-products + for the list of supported resources. 
+ """ + RESTRICTION_TYPE_UNSPECIFIED = 0 + ALLOW_ALL_GCP_RESOURCES = 1 + ALLOW_COMPLIANT_RESOURCES = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + restriction_type: RestrictionType = proto.Field( + proto.ENUM, + number=2, + enum=RestrictionType, + ) + + +class RestrictAllowedResourcesResponse(proto.Message): + r"""Response for restricting the list of allowed resources.""" + + +class AcknowledgeViolationRequest(proto.Message): + r"""Request for acknowledging the violation + Next Id: 4 + + Attributes: + name (str): + Required. The resource name of the Violation + to acknowledge. Format: + + organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation} + comment (str): + Required. Business justification explaining + the need for violation acknowledgement + non_compliant_org_policy (str): + Optional. This field is deprecated and will be removed in + future version of the API. Name of the OrgPolicy which was + modified with non-compliant change and resulted in this + violation. Format: + projects/{project_number}/policies/{constraint_name} + folders/{folder_id}/policies/{constraint_name} + organizations/{organization_id}/policies/{constraint_name} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + comment: str = proto.Field( + proto.STRING, + number=2, + ) + non_compliant_org_policy: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AcknowledgeViolationResponse(proto.Message): + r"""Response for violation acknowledgement""" + + +class TimeWindow(proto.Message): + r"""Interval defining a time window. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start of the time window. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end of the time window. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListViolationsRequest(proto.Message): + r"""Request for fetching violations in an organization. + + Attributes: + parent (str): + Required. The Workload name. Format + ``organizations/{org_id}/locations/{location}/workloads/{workload}``. + interval (google.cloud.assuredworkloads_v1.types.TimeWindow): + Optional. Specifies the time window for retrieving active + Violations. When specified, retrieves Violations that were + active between start_time and end_time. + page_size (int): + Optional. Page size. + page_token (str): + Optional. Page token returned from previous + request. + filter (str): + Optional. A custom filter for filtering by + the Violations properties. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + interval: "TimeWindow" = proto.Field( + proto.MESSAGE, + number=2, + message="TimeWindow", + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListViolationsResponse(proto.Message): + r"""Response of ListViolations endpoint. + + Attributes: + violations (MutableSequence[google.cloud.assuredworkloads_v1.types.Violation]): + List of Violations under a Workload. + next_page_token (str): + The next page token. Returns empty if reached + the last page. + """ + + @property + def raw_page(self): + return self + + violations: MutableSequence["Violation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Violation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetViolationRequest(proto.Message): + r"""Request for fetching a Workload Violation. + + Attributes: + name (str): + Required. 
The resource name of the Violation + to fetch (ie. Violation.name). Format: + + organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Violation(proto.Message): + r"""Workload monitoring Violation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Immutable. Name of the Violation. Format: + organizations/{organization}/locations/{location}/workloads/{workload_id}/violations/{violations_id} + description (str): + Output only. Description for the Violation. + e.g. OrgPolicy gcp.resourceLocations has non + compliant value. + begin_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time of the event which + triggered the Violation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time when the Violation + record was updated. + resolve_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time of the event which fixed + the Violation. If the violation is ACTIVE this + will be empty. + category (str): + Output only. Category under which this + violation is mapped. e.g. Location, Service + Usage, Access, Encryption, etc. + state (google.cloud.assuredworkloads_v1.types.Violation.State): + Output only. State of the violation + org_policy_constraint (str): + Output only. Immutable. The + org-policy-constraint that was incorrectly + changed, which resulted in this violation. + audit_log_link (str): + Output only. Immutable. Audit Log Link for + violated resource Format: + + https://console.cloud.google.com/logs/query;query={logName}{protoPayload.resourceName}{timeRange}{folder} + non_compliant_org_policy (str): + Output only. Immutable. Name of the OrgPolicy which was + modified with non-compliant change and resulted this + violation. 
Format: + projects/{project_number}/policies/{constraint_name} + folders/{folder_id}/policies/{constraint_name} + organizations/{organization_id}/policies/{constraint_name} + remediation (google.cloud.assuredworkloads_v1.types.Violation.Remediation): + Output only. Compliance violation remediation + acknowledged (bool): + Output only. A boolean that indicates if the + violation is acknowledged + acknowledgement_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when this violation was + acknowledged last. This will be absent when + acknowledged field is marked as false. + + This field is a member of `oneof`_ ``_acknowledgement_time``. + exception_audit_log_link (str): + Output only. Immutable. Audit Log link to + find business justification provided for + violation exception. Format: + + https://console.cloud.google.com/logs/query;query={logName}{protoPayload.resourceName}{protoPayload.methodName}{timeRange}{organization} + """ + + class State(proto.Enum): + r"""Violation State Values + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + RESOLVED (2): + Violation is resolved. + UNRESOLVED (3): + Violation is Unresolved + EXCEPTION (4): + Violation is Exception + """ + STATE_UNSPECIFIED = 0 + RESOLVED = 2 + UNRESOLVED = 3 + EXCEPTION = 4 + + class Remediation(proto.Message): + r"""Represents remediation guidance to resolve compliance + violation for AssuredWorkload + + Attributes: + instructions (google.cloud.assuredworkloads_v1.types.Violation.Remediation.Instructions): + Required. Remediation instructions to resolve + violations + compliant_values (MutableSequence[str]): + Values that can resolve the violation + For example: for list org policy violations, + this will either be the list of allowed or + denied values + remediation_type (google.cloud.assuredworkloads_v1.types.Violation.Remediation.RemediationType): + Output only. 
Reemediation type based on the + type of org policy values violated + """ + + class RemediationType(proto.Enum): + r"""Classifying remediation into various types based on the kind + of violation. For example, violations caused due to changes in + boolean org policy requires different remediation instructions + compared to violation caused due to changes in allowed values of + list org policy. + + Values: + REMEDIATION_TYPE_UNSPECIFIED (0): + Unspecified remediation type + REMEDIATION_BOOLEAN_ORG_POLICY_VIOLATION (1): + Remediation type for boolean org policy + REMEDIATION_LIST_ALLOWED_VALUES_ORG_POLICY_VIOLATION (2): + Remediation type for list org policy which + have allowed values in the monitoring rule + REMEDIATION_LIST_DENIED_VALUES_ORG_POLICY_VIOLATION (3): + Remediation type for list org policy which + have denied values in the monitoring rule + REMEDIATION_RESTRICT_CMEK_CRYPTO_KEY_PROJECTS_ORG_POLICY_VIOLATION (4): + Remediation type for + gcp.restrictCmekCryptoKeyProjects + """ + REMEDIATION_TYPE_UNSPECIFIED = 0 + REMEDIATION_BOOLEAN_ORG_POLICY_VIOLATION = 1 + REMEDIATION_LIST_ALLOWED_VALUES_ORG_POLICY_VIOLATION = 2 + REMEDIATION_LIST_DENIED_VALUES_ORG_POLICY_VIOLATION = 3 + REMEDIATION_RESTRICT_CMEK_CRYPTO_KEY_PROJECTS_ORG_POLICY_VIOLATION = 4 + + class Instructions(proto.Message): + r"""Instructions to remediate violation + + Attributes: + gcloud_instructions (google.cloud.assuredworkloads_v1.types.Violation.Remediation.Instructions.Gcloud): + Remediation instructions to resolve violation + via gcloud cli + console_instructions (google.cloud.assuredworkloads_v1.types.Violation.Remediation.Instructions.Console): + Remediation instructions to resolve violation + via cloud console + """ + + class Gcloud(proto.Message): + r"""Remediation instructions to resolve violation via gcloud cli + + Attributes: + gcloud_commands (MutableSequence[str]): + Gcloud command to resolve violation + steps (MutableSequence[str]): + Steps to resolve violation via gcloud cli + 
additional_links (MutableSequence[str]): + Additional urls for more information about + steps + """ + + gcloud_commands: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + steps: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + additional_links: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + class Console(proto.Message): + r"""Remediation instructions to resolve violation via cloud + console + + Attributes: + console_uris (MutableSequence[str]): + Link to console page where violations can be + resolved + steps (MutableSequence[str]): + Steps to resolve violation via cloud console + additional_links (MutableSequence[str]): + Additional urls for more information about + steps + """ + + console_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + steps: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + additional_links: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + gcloud_instructions: "Violation.Remediation.Instructions.Gcloud" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="Violation.Remediation.Instructions.Gcloud", + ) + ) + console_instructions: "Violation.Remediation.Instructions.Console" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="Violation.Remediation.Instructions.Console", + ) + ) + + instructions: "Violation.Remediation.Instructions" = proto.Field( + proto.MESSAGE, + number=1, + message="Violation.Remediation.Instructions", + ) + compliant_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + remediation_type: "Violation.Remediation.RemediationType" = proto.Field( + proto.ENUM, + number=3, + enum="Violation.Remediation.RemediationType", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + begin_time: timestamp_pb2.Timestamp = proto.Field( + 
proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + resolve_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + category: str = proto.Field( + proto.STRING, + number=6, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + org_policy_constraint: str = proto.Field( + proto.STRING, + number=8, + ) + audit_log_link: str = proto.Field( + proto.STRING, + number=11, + ) + non_compliant_org_policy: str = proto.Field( + proto.STRING, + number=12, + ) + remediation: Remediation = proto.Field( + proto.MESSAGE, + number=13, + message=Remediation, + ) + acknowledged: bool = proto.Field( + proto.BOOL, + number=14, + ) + acknowledgement_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message=timestamp_pb2.Timestamp, + ) + exception_audit_log_link: str = proto.Field( + proto.STRING, + number=16, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/__init__.py new file mode 100644 index 000000000000..35b23623d053 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/__init__.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.assured_workloads_service import ( + AssuredWorkloadsServiceAsyncClient, + AssuredWorkloadsServiceClient, +) +from .types.assuredworkloads import ( + AnalyzeWorkloadMoveRequest, + AnalyzeWorkloadMoveResponse, + CreateWorkloadOperationMetadata, + CreateWorkloadRequest, + DeleteWorkloadRequest, + GetWorkloadRequest, + ListWorkloadsRequest, + ListWorkloadsResponse, + RestrictAllowedResourcesRequest, + RestrictAllowedResourcesResponse, + UpdateWorkloadRequest, + Workload, +) + +__all__ = ( + "AssuredWorkloadsServiceAsyncClient", + "AnalyzeWorkloadMoveRequest", + "AnalyzeWorkloadMoveResponse", + "AssuredWorkloadsServiceClient", + "CreateWorkloadOperationMetadata", + "CreateWorkloadRequest", + "DeleteWorkloadRequest", + "GetWorkloadRequest", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "UpdateWorkloadRequest", + "Workload", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_metadata.json b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_metadata.json new file mode 100644 index 000000000000..68c3be657cf7 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.assuredworkloads_v1beta1", + "protoPackage": "google.cloud.assuredworkloads.v1beta1", + "schema": "1.0", + "services": { + "AssuredWorkloadsService": { + "clients": { + "grpc": { + "libraryClient": "AssuredWorkloadsServiceClient", + "rpcs": { + 
"AnalyzeWorkloadMove": { + "methods": [ + "analyze_workload_move" + ] + }, + "CreateWorkload": { + "methods": [ + "create_workload" + ] + }, + "DeleteWorkload": { + "methods": [ + "delete_workload" + ] + }, + "GetWorkload": { + "methods": [ + "get_workload" + ] + }, + "ListWorkloads": { + "methods": [ + "list_workloads" + ] + }, + "RestrictAllowedResources": { + "methods": [ + "restrict_allowed_resources" + ] + }, + "UpdateWorkload": { + "methods": [ + "update_workload" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AssuredWorkloadsServiceAsyncClient", + "rpcs": { + "AnalyzeWorkloadMove": { + "methods": [ + "analyze_workload_move" + ] + }, + "CreateWorkload": { + "methods": [ + "create_workload" + ] + }, + "DeleteWorkload": { + "methods": [ + "delete_workload" + ] + }, + "GetWorkload": { + "methods": [ + "get_workload" + ] + }, + "ListWorkloads": { + "methods": [ + "list_workloads" + ] + }, + "RestrictAllowedResources": { + "methods": [ + "restrict_allowed_resources" + ] + }, + "UpdateWorkload": { + "methods": [ + "update_workload" + ] + } + } + }, + "rest": { + "libraryClient": "AssuredWorkloadsServiceClient", + "rpcs": { + "AnalyzeWorkloadMove": { + "methods": [ + "analyze_workload_move" + ] + }, + "CreateWorkload": { + "methods": [ + "create_workload" + ] + }, + "DeleteWorkload": { + "methods": [ + "delete_workload" + ] + }, + "GetWorkload": { + "methods": [ + "get_workload" + ] + }, + "ListWorkloads": { + "methods": [ + "list_workloads" + ] + }, + "RestrictAllowedResources": { + "methods": [ + "restrict_allowed_resources" + ] + }, + "UpdateWorkload": { + "methods": [ + "update_workload" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py new file mode 100644 index 000000000000..2d55bb3824da --- /dev/null +++ 
b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.10.3" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/py.typed b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/py.typed new file mode 100644 index 000000000000..3762b50eeb3d --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-assured-workloads package uses inline types. diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py new file mode 100644 index 000000000000..963d19bb2128 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AssuredWorkloadsServiceAsyncClient +from .client import AssuredWorkloadsServiceClient + +__all__ = ( + "AssuredWorkloadsServiceClient", + "AssuredWorkloadsServiceAsyncClient", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py new file mode 100644 index 000000000000..bd01a4b23f86 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py @@ -0,0 +1,1150 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import ( + pagers, +) +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + +from .client import AssuredWorkloadsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport +from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport + + +class AssuredWorkloadsServiceAsyncClient: + """Service to manage AssuredWorkloads.""" + + _client: AssuredWorkloadsServiceClient + + DEFAULT_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT + + workload_path = staticmethod(AssuredWorkloadsServiceClient.workload_path) + parse_workload_path = staticmethod( + AssuredWorkloadsServiceClient.parse_workload_path + ) + common_billing_account_path = staticmethod( + 
AssuredWorkloadsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AssuredWorkloadsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AssuredWorkloadsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + AssuredWorkloadsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + AssuredWorkloadsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + AssuredWorkloadsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceAsyncClient: The constructed client. + """ + return AssuredWorkloadsServiceClient.from_service_account_info.__func__(AssuredWorkloadsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + AssuredWorkloadsServiceAsyncClient: The constructed client. + """ + return AssuredWorkloadsServiceClient.from_service_account_file.__func__(AssuredWorkloadsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AssuredWorkloadsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AssuredWorkloadsServiceTransport: + """Returns the transport used by the client instance. 
+ + Returns: + AssuredWorkloadsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AssuredWorkloadsServiceClient).get_transport_class, + type(AssuredWorkloadsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AssuredWorkloadsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the assured workloads service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AssuredWorkloadsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. 
If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AssuredWorkloadsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_workload( + self, + request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, + *, + parent: Optional[str] = None, + workload: Optional[assuredworkloads.Workload] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates Assured Workload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_create_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1beta1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" + + request = assuredworkloads_v1beta1.CreateWorkloadRequest( + parent="parent_value", + workload=workload, + ) + + # Make the request + operation = client.create_workload(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest, dict]]): + The request object. Request for creating a workload. + parent (:class:`str`): + Required. The resource name of the new Workload's + parent. Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workload (:class:`google.cloud.assuredworkloads_v1beta1.types.Workload`): + Required. Assured Workload to create + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.assuredworkloads_v1beta1.types.Workload` An Workload object for managing highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workload]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.CreateWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workload is not None: + request.workload = workload + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workload, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + assuredworkloads.Workload, + metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_workload( + self, + request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, + *, + workload: Optional[assuredworkloads.Workload] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_update_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1beta1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" + + request = assuredworkloads_v1beta1.UpdateWorkloadRequest( + workload=workload, + ) + + # Make the request + response = await client.update_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest, dict]]): + The request object. Request for Updating a workload. + workload (:class:`google.cloud.assuredworkloads_v1beta1.types.Workload`): + Required. The workload to update. 
The workload's + ``name`` field is used to identify the workload to be + updated. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.Workload: + An Workload object for managing + highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workload, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.UpdateWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workload is not None: + request.workload = workload + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workload, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def restrict_allowed_resources( + self, + request: Optional[ + Union[assuredworkloads.RestrictAllowedResourcesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + r"""Restrict the list of resources allowed in the + Workload environment. The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_restrict_allowed_resources(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest( + name="name_value", + restriction_type="ALLOW_COMPLIANT_RESOURCES", + ) + + # Make the request + response = await client.restrict_allowed_resources(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest, dict]]): + The request object. 
Request for restricting list of + available resources in Workload + environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesResponse: + Response for restricting the list of + allowed resources. + + """ + # Create or coerce a protobuf request object. + request = assuredworkloads.RestrictAllowedResourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restrict_allowed_resources, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_workload( + self, + request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. In addition to + assuredworkloads.workload.delete permission, the user should + also have orgpolicy.policy.set permission on the deleted folder + to remove Assured Workloads OrgPolicies. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_delete_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.DeleteWorkloadRequest( + name="name_value", + ) + + # Make the request + await client.delete_workload(request=request) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest, dict]]): + The request object. Request for deleting a Workload. + name (:class:`str`): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = assuredworkloads.DeleteWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workload, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_workload( + self, + request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Gets Assured Workload associated with a CRM Node + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_get_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.GetWorkloadRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest, dict]]): + The request object. Request for fetching a workload. + name (:class:`str`): + Required. The resource name of the Workload to fetch. + This is the workloads's relative path in the API, + formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.Workload: + An Workload object for managing + highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.GetWorkloadRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workload, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_workload_move( + self, + request: Optional[ + Union[assuredworkloads.AnalyzeWorkloadMoveRequest, dict] + ] = None, + *, + project: Optional[str] = None, + target: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.AnalyzeWorkloadMoveResponse: + r"""Analyze if the source Assured Workloads can be moved + to the target Assured Workload + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_analyze_workload_move(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.AnalyzeWorkloadMoveRequest( + source="source_value", + target="target_value", + ) + + # Make the request + response = await client.analyze_workload_move(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveRequest, dict]]): + The request object. A request to analyze a hypothetical + move of a source project or + project-based workload to a target + (destination) folder-based workload. + project (:class:`str`): + The source type is a project. Specify the project's + relative resource name, formatted as either a project + number or a project ID: "projects/{PROJECT_NUMBER}" or + "projects/{PROJECT_ID}" For example: + "projects/951040570662" when specifying a project + number, or "projects/my-project-123" when specifying a + project ID. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target (:class:`str`): + Required. The resource ID of the folder-based + destination workload. This workload is where the source + project will hypothetically be moved to. Specify the + workload's relative resource name, formatted as: + "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}" + For example: + "organizations/123/locations/us-east1/workloads/assured-workload-2" + + This corresponds to the ``target`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveResponse: + A response that includes the analysis + of the hypothetical resource move. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.AnalyzeWorkloadMoveRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target is not None: + request.target = target + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_workload_move, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_workloads( + self, + request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkloadsAsyncPager: + r"""Lists Assured Workloads under a CRM Node. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + async def sample_list_workloads(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.ListWorkloadsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workloads(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest, dict]]): + The request object. Request for fetching workloads in an + organization. + parent (:class:`str`): + Required. Parent Resource to list workloads from. Must + be of the form + ``organizations/{org_id}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager: + Response of ListWorkloads endpoint. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = assuredworkloads.ListWorkloadsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workloads, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkloadsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AssuredWorkloadsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AssuredWorkloadsServiceAsyncClient",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py new file mode 100644 index 000000000000..34ab97259a6e --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py @@ -0,0 +1,1346 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import ( + pagers, +) +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + +from .transports.base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport +from .transports.grpc import AssuredWorkloadsServiceGrpcTransport +from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport +from .transports.rest import AssuredWorkloadsServiceRestTransport + + +class AssuredWorkloadsServiceClientMeta(type): + """Metaclass for the AssuredWorkloadsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] + _transport_registry["grpc"] = AssuredWorkloadsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AssuredWorkloadsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AssuredWorkloadsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AssuredWorkloadsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + + class AssuredWorkloadsServiceClient(metaclass=AssuredWorkloadsServiceClientMeta): + """Service to manage AssuredWorkloads.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "assuredworkloads.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssuredWorkloadsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AssuredWorkloadsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AssuredWorkloadsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def workload_path( + organization: str, + location: str, + workload: str, + ) -> str: + """Returns a fully-qualified workload string.""" + return "organizations/{organization}/locations/{location}/workloads/{workload}".format( + organization=organization, + location=location, + workload=workload, + ) + + @staticmethod + def parse_workload_path(path: str) -> Dict[str, str]: + """Parses a workload path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/workloads/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: 
str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114.
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AssuredWorkloadsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the assured workloads service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AssuredWorkloadsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AssuredWorkloadsServiceTransport): + # transport is a AssuredWorkloadsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_workload( + self, + request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, + *, + parent: Optional[str] = None, + workload: Optional[assuredworkloads.Workload] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates Assured Workload. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_create_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1beta1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" + + request = assuredworkloads_v1beta1.CreateWorkloadRequest( + parent="parent_value", + workload=workload, + ) + + # Make the request + operation = client.create_workload(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest, dict]): + The request object. Request for creating a workload. + parent (str): + Required. The resource name of the new Workload's + parent. Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workload (google.cloud.assuredworkloads_v1beta1.types.Workload): + Required. Assured Workload to create + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.assuredworkloads_v1beta1.types.Workload` An Workload object for managing highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workload]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.CreateWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.CreateWorkloadRequest): + request = assuredworkloads.CreateWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if workload is not None: + request.workload = workload + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_workload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + assuredworkloads.Workload, + metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_workload( + self, + request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, + *, + workload: Optional[assuredworkloads.Workload] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_update_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + workload = assuredworkloads_v1beta1.Workload() + workload.display_name = "display_name_value" + workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" + + request = assuredworkloads_v1beta1.UpdateWorkloadRequest( + workload=workload, + ) + + # Make the request + response = client.update_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest, dict]): + The request object. Request for Updating a workload. + workload (google.cloud.assuredworkloads_v1beta1.types.Workload): + Required. The workload to update. The workload's + ``name`` field is used to identify the workload to be + updated. 
Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``workload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.Workload: + An Workload object for managing + highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workload, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.UpdateWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.UpdateWorkloadRequest): + request = assuredworkloads.UpdateWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if workload is not None: + request.workload = workload + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_workload] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def restrict_allowed_resources( + self, + request: Optional[ + Union[assuredworkloads.RestrictAllowedResourcesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + r"""Restrict the list of resources allowed in the + Workload environment. The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_restrict_allowed_resources(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest( + name="name_value", + restriction_type="ALLOW_COMPLIANT_RESOURCES", + ) + + # Make the request + response = client.restrict_allowed_resources(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest, dict]): + The request object. Request for restricting list of + available resources in Workload + environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesResponse: + Response for restricting the list of + allowed resources. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.RestrictAllowedResourcesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.RestrictAllowedResourcesRequest): + request = assuredworkloads.RestrictAllowedResourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.restrict_allowed_resources + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_workload( + self, + request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. In addition to + assuredworkloads.workload.delete permission, the user should + also have orgpolicy.policy.set permission on the deleted folder + to remove Assured Workloads OrgPolicies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_delete_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.DeleteWorkloadRequest( + name="name_value", + ) + + # Make the request + client.delete_workload(request=request) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest, dict]): + The request object. Request for deleting a Workload. + name (str): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.DeleteWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, assuredworkloads.DeleteWorkloadRequest): + request = assuredworkloads.DeleteWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_workload] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_workload( + self, + request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + r"""Gets Assured Workload associated with a CRM Node + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_get_workload(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.GetWorkloadRequest( + name="name_value", + ) + + # Make the request + response = client.get_workload(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest, dict]): + The request object. Request for fetching a workload. + name (str): + Required. The resource name of the Workload to fetch. + This is the workloads's relative path in the API, + formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.Workload: + An Workload object for managing + highly regulated workloads of cloud + customers. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.GetWorkloadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.GetWorkloadRequest): + request = assuredworkloads.GetWorkloadRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workload] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_workload_move( + self, + request: Optional[ + Union[assuredworkloads.AnalyzeWorkloadMoveRequest, dict] + ] = None, + *, + project: Optional[str] = None, + target: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.AnalyzeWorkloadMoveResponse: + r"""Analyze if the source Assured Workloads can be moved + to the target Assured Workload + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_analyze_workload_move(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.AnalyzeWorkloadMoveRequest( + source="source_value", + target="target_value", + ) + + # Make the request + response = client.analyze_workload_move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveRequest, dict]): + The request object. A request to analyze a hypothetical + move of a source project or + project-based workload to a target + (destination) folder-based workload. + project (str): + The source type is a project. Specify the project's + relative resource name, formatted as either a project + number or a project ID: "projects/{PROJECT_NUMBER}" or + "projects/{PROJECT_ID}" For example: + "projects/951040570662" when specifying a project + number, or "projects/my-project-123" when specifying a + project ID. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target (str): + Required. The resource ID of the folder-based + destination workload. This workload is where the source + project will hypothetically be moved to. Specify the + workload's relative resource name, formatted as: + "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}" + For example: + "organizations/123/locations/us-east1/workloads/assured-workload-2" + + This corresponds to the ``target`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveResponse: + A response that includes the analysis + of the hypothetical resource move. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, target]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.AnalyzeWorkloadMoveRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.AnalyzeWorkloadMoveRequest): + request = assuredworkloads.AnalyzeWorkloadMoveRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if target is not None: + request.target = target + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_workload_move] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_workloads( + self, + request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkloadsPager: + r"""Lists Assured Workloads under a CRM Node. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import assuredworkloads_v1beta1 + + def sample_list_workloads(): + # Create a client + client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() + + # Initialize request argument(s) + request = assuredworkloads_v1beta1.ListWorkloadsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workloads(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest, dict]): + The request object. Request for fetching workloads in an + organization. + parent (str): + Required. Parent Resource to list workloads from. Must + be of the form + ``organizations/{org_id}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsPager: + Response of ListWorkloads endpoint. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a assuredworkloads.ListWorkloadsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, assuredworkloads.ListWorkloadsRequest): + request = assuredworkloads.ListWorkloadsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workloads] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkloadsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AssuredWorkloadsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AssuredWorkloadsServiceClient",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py new file mode 100644 index 000000000000..320ca3df8d7e --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + + +class ListWorkloadsPager: + """A pager for iterating through ``list_workloads`` requests. + + This class thinly wraps an initial + :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workloads`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkloads`` requests and continue to iterate + through the ``workloads`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., assuredworkloads.ListWorkloadsResponse], + request: assuredworkloads.ListWorkloadsRequest, + response: assuredworkloads.ListWorkloadsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest): + The initial request object. + response (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = assuredworkloads.ListWorkloadsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[assuredworkloads.ListWorkloadsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[assuredworkloads.Workload]: + for page in self.pages: + yield from page.workloads + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkloadsAsyncPager: + """A pager for iterating through ``list_workloads`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workloads`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkloads`` requests and continue to iterate + through the ``workloads`` field on the + corresponding responses. + + All the usual :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[assuredworkloads.ListWorkloadsResponse]], + request: assuredworkloads.ListWorkloadsRequest, + response: assuredworkloads.ListWorkloadsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest): + The initial request object. + response (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = assuredworkloads.ListWorkloadsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[assuredworkloads.ListWorkloadsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[assuredworkloads.Workload]: + async def async_generator(): + async for page in self.pages: + for response in page.workloads: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py new file mode 100644 index 000000000000..077f25e35559 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AssuredWorkloadsServiceTransport +from .grpc import AssuredWorkloadsServiceGrpcTransport +from .grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport +from .rest import ( + AssuredWorkloadsServiceRestInterceptor, + AssuredWorkloadsServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] +_transport_registry["grpc"] = AssuredWorkloadsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AssuredWorkloadsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AssuredWorkloadsServiceRestTransport + +__all__ = ( + "AssuredWorkloadsServiceTransport", + "AssuredWorkloadsServiceGrpcTransport", + "AssuredWorkloadsServiceGrpcAsyncIOTransport", + "AssuredWorkloadsServiceRestTransport", + "AssuredWorkloadsServiceRestInterceptor", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py new file mode 100644 index 000000000000..7f6c43bc8718 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py @@ -0,0 +1,312 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AssuredWorkloadsServiceTransport(abc.ABC): + """Abstract transport class for AssuredWorkloadsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "assuredworkloads.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_workload: gapic_v1.method.wrap_method( + self.create_workload, + default_timeout=60.0, + client_info=client_info, + ), + self.update_workload: gapic_v1.method.wrap_method( + self.update_workload, + default_timeout=60.0, + client_info=client_info, + ), + self.restrict_allowed_resources: gapic_v1.method.wrap_method( + self.restrict_allowed_resources, + default_timeout=None, + client_info=client_info, + ), + self.delete_workload: gapic_v1.method.wrap_method( + self.delete_workload, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_workload: gapic_v1.method.wrap_method( + self.get_workload, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.analyze_workload_move: gapic_v1.method.wrap_method( + self.analyze_workload_move, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_workloads: gapic_v1.method.wrap_method( + self.list_workloads, + default_retry=retries.Retry( + initial=0.2, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def 
close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_workload( + self, + ) -> Callable[ + [assuredworkloads.CreateWorkloadRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workload( + self, + ) -> Callable[ + [assuredworkloads.UpdateWorkloadRequest], + Union[assuredworkloads.Workload, Awaitable[assuredworkloads.Workload]], + ]: + raise NotImplementedError() + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + Union[ + assuredworkloads.RestrictAllowedResourcesResponse, + Awaitable[assuredworkloads.RestrictAllowedResourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_workload( + self, + ) -> Callable[ + [assuredworkloads.DeleteWorkloadRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_workload( + self, + ) -> Callable[ + [assuredworkloads.GetWorkloadRequest], + Union[assuredworkloads.Workload, Awaitable[assuredworkloads.Workload]], + ]: + raise NotImplementedError() + + @property + def analyze_workload_move( + self, + ) -> Callable[ + [assuredworkloads.AnalyzeWorkloadMoveRequest], + Union[ + assuredworkloads.AnalyzeWorkloadMoveResponse, + Awaitable[assuredworkloads.AnalyzeWorkloadMoveResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], + Union[ + assuredworkloads.ListWorkloadsResponse, + Awaitable[assuredworkloads.ListWorkloadsResponse], + ], + ]: 
+ raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AssuredWorkloadsServiceTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py new file mode 100644 index 000000000000..ed323c5643f4 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py @@ -0,0 +1,498 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + +from .base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport + + +class AssuredWorkloadsServiceGrpcTransport(AssuredWorkloadsServiceTransport): + """gRPC backend transport for AssuredWorkloadsService. + + Service to manage AssuredWorkloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_workload( + self, + ) -> Callable[[assuredworkloads.CreateWorkloadRequest], operations_pb2.Operation]: + r"""Return a callable for the create workload method over gRPC. 
+ + Creates Assured Workload. + + Returns: + Callable[[~.CreateWorkloadRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workload" not in self._stubs: + self._stubs["create_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/CreateWorkload", + request_serializer=assuredworkloads.CreateWorkloadRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workload"] + + @property + def update_workload( + self, + ) -> Callable[[assuredworkloads.UpdateWorkloadRequest], assuredworkloads.Workload]: + r"""Return a callable for the update workload method over gRPC. + + Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + Returns: + Callable[[~.UpdateWorkloadRequest], + ~.Workload]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_workload" not in self._stubs: + self._stubs["update_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/UpdateWorkload", + request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["update_workload"] + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + assuredworkloads.RestrictAllowedResourcesResponse, + ]: + r"""Return a callable for the restrict allowed resources method over gRPC. + + Restrict the list of resources allowed in the + Workload environment. The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + Returns: + Callable[[~.RestrictAllowedResourcesRequest], + ~.RestrictAllowedResourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restrict_allowed_resources" not in self._stubs: + self._stubs["restrict_allowed_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/RestrictAllowedResources", + request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize, + response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize, + ) + return self._stubs["restrict_allowed_resources"] + + @property + def delete_workload( + self, + ) -> Callable[[assuredworkloads.DeleteWorkloadRequest], empty_pb2.Empty]: + r"""Return a callable for the delete workload method over gRPC. + + Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. In addition to + assuredworkloads.workload.delete permission, the user should + also have orgpolicy.policy.set permission on the deleted folder + to remove Assured Workloads OrgPolicies. + + Returns: + Callable[[~.DeleteWorkloadRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workload" not in self._stubs: + self._stubs["delete_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/DeleteWorkload", + request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_workload"] + + @property + def get_workload( + self, + ) -> Callable[[assuredworkloads.GetWorkloadRequest], assuredworkloads.Workload]: + r"""Return a callable for the get workload method over gRPC. 
+ + Gets Assured Workload associated with a CRM Node + + Returns: + Callable[[~.GetWorkloadRequest], + ~.Workload]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workload" not in self._stubs: + self._stubs["get_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/GetWorkload", + request_serializer=assuredworkloads.GetWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["get_workload"] + + @property + def analyze_workload_move( + self, + ) -> Callable[ + [assuredworkloads.AnalyzeWorkloadMoveRequest], + assuredworkloads.AnalyzeWorkloadMoveResponse, + ]: + r"""Return a callable for the analyze workload move method over gRPC. + + Analyze if the source Assured Workloads can be moved + to the target Assured Workload + + Returns: + Callable[[~.AnalyzeWorkloadMoveRequest], + ~.AnalyzeWorkloadMoveResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "analyze_workload_move" not in self._stubs: + self._stubs["analyze_workload_move"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/AnalyzeWorkloadMove", + request_serializer=assuredworkloads.AnalyzeWorkloadMoveRequest.serialize, + response_deserializer=assuredworkloads.AnalyzeWorkloadMoveResponse.deserialize, + ) + return self._stubs["analyze_workload_move"] + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], assuredworkloads.ListWorkloadsResponse + ]: + r"""Return a callable for the list workloads method over gRPC. + + Lists Assured Workloads under a CRM Node. + + Returns: + Callable[[~.ListWorkloadsRequest], + ~.ListWorkloadsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workloads" not in self._stubs: + self._stubs["list_workloads"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/ListWorkloads", + request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, + response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, + ) + return self._stubs["list_workloads"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AssuredWorkloadsServiceGrpcTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..09aeb17f10aa --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py @@ -0,0 +1,506 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + +from .base import DEFAULT_CLIENT_INFO, AssuredWorkloadsServiceTransport +from .grpc import AssuredWorkloadsServiceGrpcTransport + + +class AssuredWorkloadsServiceGrpcAsyncIOTransport(AssuredWorkloadsServiceTransport): + """gRPC AsyncIO backend transport for AssuredWorkloadsService. + + Service to manage AssuredWorkloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_workload( + self, + ) -> Callable[ + [assuredworkloads.CreateWorkloadRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create workload method over gRPC. + + Creates Assured Workload. + + Returns: + Callable[[~.CreateWorkloadRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_workload" not in self._stubs: + self._stubs["create_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/CreateWorkload", + request_serializer=assuredworkloads.CreateWorkloadRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_workload"] + + @property + def update_workload( + self, + ) -> Callable[ + [assuredworkloads.UpdateWorkloadRequest], Awaitable[assuredworkloads.Workload] + ]: + r"""Return a callable for the update workload method over gRPC. + + Updates an existing workload. Currently allows updating of + workload display_name and labels. For force updates don't set + etag field in the Workload. Only one update operation per + workload can be in progress. + + Returns: + Callable[[~.UpdateWorkloadRequest], + Awaitable[~.Workload]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workload" not in self._stubs: + self._stubs["update_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/UpdateWorkload", + request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["update_workload"] + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + Awaitable[assuredworkloads.RestrictAllowedResourcesResponse], + ]: + r"""Return a callable for the restrict allowed resources method over gRPC. + + Restrict the list of resources allowed in the + Workload environment. 
The current list of allowed + products can be found at + https://cloud.google.com/assured-workloads/docs/supported-products + In addition to assuredworkloads.workload.update + permission, the user should also have + orgpolicy.policy.set permission on the folder resource + to use this functionality. + + Returns: + Callable[[~.RestrictAllowedResourcesRequest], + Awaitable[~.RestrictAllowedResourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restrict_allowed_resources" not in self._stubs: + self._stubs["restrict_allowed_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/RestrictAllowedResources", + request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize, + response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize, + ) + return self._stubs["restrict_allowed_resources"] + + @property + def delete_workload( + self, + ) -> Callable[[assuredworkloads.DeleteWorkloadRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete workload method over gRPC. + + Deletes the workload. Make sure that workload's direct children + are already in a deleted state, otherwise the request will fail + with a FAILED_PRECONDITION error. In addition to + assuredworkloads.workload.delete permission, the user should + also have orgpolicy.policy.set permission on the deleted folder + to remove Assured Workloads OrgPolicies. + + Returns: + Callable[[~.DeleteWorkloadRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workload" not in self._stubs: + self._stubs["delete_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/DeleteWorkload", + request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_workload"] + + @property + def get_workload( + self, + ) -> Callable[ + [assuredworkloads.GetWorkloadRequest], Awaitable[assuredworkloads.Workload] + ]: + r"""Return a callable for the get workload method over gRPC. + + Gets Assured Workload associated with a CRM Node + + Returns: + Callable[[~.GetWorkloadRequest], + Awaitable[~.Workload]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workload" not in self._stubs: + self._stubs["get_workload"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/GetWorkload", + request_serializer=assuredworkloads.GetWorkloadRequest.serialize, + response_deserializer=assuredworkloads.Workload.deserialize, + ) + return self._stubs["get_workload"] + + @property + def analyze_workload_move( + self, + ) -> Callable[ + [assuredworkloads.AnalyzeWorkloadMoveRequest], + Awaitable[assuredworkloads.AnalyzeWorkloadMoveResponse], + ]: + r"""Return a callable for the analyze workload move method over gRPC. + + Analyze if the source Assured Workloads can be moved + to the target Assured Workload + + Returns: + Callable[[~.AnalyzeWorkloadMoveRequest], + Awaitable[~.AnalyzeWorkloadMoveResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_workload_move" not in self._stubs: + self._stubs["analyze_workload_move"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/AnalyzeWorkloadMove", + request_serializer=assuredworkloads.AnalyzeWorkloadMoveRequest.serialize, + response_deserializer=assuredworkloads.AnalyzeWorkloadMoveResponse.deserialize, + ) + return self._stubs["analyze_workload_move"] + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], + Awaitable[assuredworkloads.ListWorkloadsResponse], + ]: + r"""Return a callable for the list workloads method over gRPC. + + Lists Assured Workloads under a CRM Node. + + Returns: + Callable[[~.ListWorkloadsRequest], + Awaitable[~.ListWorkloadsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workloads" not in self._stubs: + self._stubs["list_workloads"] = self.grpc_channel.unary_unary( + "/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/ListWorkloads", + request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, + response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, + ) + return self._stubs["list_workloads"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("AssuredWorkloadsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py new file mode 100644 index 000000000000..56bef3b10d33 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py @@ -0,0 +1,919 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + +from .base import AssuredWorkloadsServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AssuredWorkloadsServiceRestInterceptor: + """Interceptor for AssuredWorkloadsService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AssuredWorkloadsServiceRestTransport. + + .. code-block:: python + class MyCustomAssuredWorkloadsServiceInterceptor(AssuredWorkloadsServiceRestInterceptor): + def pre_analyze_workload_move(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_workload_move(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workload(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workload(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workloads(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workloads(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restrict_allowed_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restrict_allowed_resources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workload(self, request, metadata): + logging.log(f"Received request: {request}") + return request, 
metadata + + def post_update_workload(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AssuredWorkloadsServiceRestTransport(interceptor=MyCustomAssuredWorkloadsServiceInterceptor()) + client = AssuredWorkloadsServiceClient(transport=transport) + + + """ + + def pre_create_workload( + self, + request: assuredworkloads.CreateWorkloadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.CreateWorkloadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_workload + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_create_workload( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_workload + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_delete_workload( + self, + request: assuredworkloads.DeleteWorkloadRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[assuredworkloads.DeleteWorkloadRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_workload + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def pre_restrict_allowed_resources( + self, + request: assuredworkloads.RestrictAllowedResourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + assuredworkloads.RestrictAllowedResourcesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for restrict_allowed_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. 
+ """ + return request, metadata + + def post_restrict_allowed_resources( + self, response: assuredworkloads.RestrictAllowedResourcesResponse + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + """Post-rpc interceptor for restrict_allowed_resources + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssuredWorkloadsService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AssuredWorkloadsService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class AssuredWorkloadsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AssuredWorkloadsServiceRestInterceptor + + +class AssuredWorkloadsServiceRestTransport(AssuredWorkloadsServiceTransport): + """REST backend transport for AssuredWorkloadsService. + + Service to manage AssuredWorkloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "assuredworkloads.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AssuredWorkloadsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AssuredWorkloadsServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to 
process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AnalyzeWorkloadMove(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("AnalyzeWorkloadMove") + + def __call__( + self, + request: assuredworkloads.AnalyzeWorkloadMoveRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.AnalyzeWorkloadMoveResponse: + raise NotImplementedError( + "Method AnalyzeWorkloadMove is not available over REST transport" + ) + + class _CreateWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("CreateWorkload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.CreateWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + 
metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create workload method over HTTP. + + Args: + request (~.assuredworkloads.CreateWorkloadRequest): + The request object. Request for creating a workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{parent=organizations/*/locations/*}/workloads", + "body": "workload", + }, + ] + request, metadata = self._interceptor.pre_create_workload(request, metadata) + pb_request = assuredworkloads.CreateWorkloadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workload(resp) + return resp + + class _DeleteWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("DeleteWorkload") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.DeleteWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete workload method over HTTP. + + Args: + request (~.assuredworkloads.DeleteWorkloadRequest): + The request object. Request for deleting a Workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{name=organizations/*/locations/*/workloads/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workload(request, metadata) + pb_request = assuredworkloads.DeleteWorkloadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("GetWorkload") + + def __call__( + self, + request: assuredworkloads.GetWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + raise NotImplementedError( + "Method GetWorkload is not available over REST transport" + ) + + class _ListWorkloads(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("ListWorkloads") + + def __call__( + self, + request: assuredworkloads.ListWorkloadsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.ListWorkloadsResponse: + raise NotImplementedError( + "Method ListWorkloads is not available over REST transport" + ) + + class _RestrictAllowedResources(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("RestrictAllowedResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: assuredworkloads.RestrictAllowedResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.RestrictAllowedResourcesResponse: + r"""Call the restrict allowed + resources method over HTTP. + + Args: + request (~.assuredworkloads.RestrictAllowedResourcesRequest): + The request object. Request for restricting list of + available resources in Workload + environment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.assuredworkloads.RestrictAllowedResourcesResponse: + Response for restricting the list of + allowed resources. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{name=organizations/*/locations/*/workloads/*}:restrictAllowedResources", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restrict_allowed_resources( + request, metadata + ) + pb_request = assuredworkloads.RestrictAllowedResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = assuredworkloads.RestrictAllowedResourcesResponse() + pb_resp = assuredworkloads.RestrictAllowedResourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restrict_allowed_resources(resp) + return resp + + class _UpdateWorkload(AssuredWorkloadsServiceRestStub): + def __hash__(self): + return hash("UpdateWorkload") + + def __call__( + self, + request: assuredworkloads.UpdateWorkloadRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> assuredworkloads.Workload: + raise NotImplementedError( + "Method UpdateWorkload is not available over REST transport" + ) + + @property + def analyze_workload_move( + self, + ) -> Callable[ + [assuredworkloads.AnalyzeWorkloadMoveRequest], + assuredworkloads.AnalyzeWorkloadMoveResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeWorkloadMove(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_workload( + self, + ) -> Callable[[assuredworkloads.CreateWorkloadRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workload( + self, + ) -> Callable[[assuredworkloads.DeleteWorkloadRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workload( + self, + ) -> Callable[[assuredworkloads.GetWorkloadRequest], assuredworkloads.Workload]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workloads( + self, + ) -> Callable[ + [assuredworkloads.ListWorkloadsRequest], assuredworkloads.ListWorkloadsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkloads(self._session, self._host, self._interceptor) # type: ignore + + @property + def restrict_allowed_resources( + self, + ) -> Callable[ + [assuredworkloads.RestrictAllowedResourcesRequest], + assuredworkloads.RestrictAllowedResourcesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestrictAllowedResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workload( + self, + ) -> Callable[[assuredworkloads.UpdateWorkloadRequest], assuredworkloads.Workload]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateWorkload(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AssuredWorkloadsServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=organizations/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AssuredWorkloadsServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=organizations/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AssuredWorkloadsServiceRestTransport",) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/__init__.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/__init__.py new file mode 100644 index 000000000000..be2ccf305f10 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .assuredworkloads import ( + AnalyzeWorkloadMoveRequest, + AnalyzeWorkloadMoveResponse, + CreateWorkloadOperationMetadata, + CreateWorkloadRequest, + DeleteWorkloadRequest, + GetWorkloadRequest, + ListWorkloadsRequest, + ListWorkloadsResponse, + RestrictAllowedResourcesRequest, + RestrictAllowedResourcesResponse, + UpdateWorkloadRequest, + Workload, +) + +__all__ = ( + "AnalyzeWorkloadMoveRequest", + "AnalyzeWorkloadMoveResponse", + "CreateWorkloadOperationMetadata", + "CreateWorkloadRequest", + "DeleteWorkloadRequest", + "GetWorkloadRequest", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "UpdateWorkloadRequest", + "Workload", +) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads.py new file mode 100644 index 000000000000..c9cfe9d7fb47 --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads.py @@ -0,0 +1,893 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.assuredworkloads.v1beta1", + manifest={ + "CreateWorkloadRequest", + "UpdateWorkloadRequest", + "RestrictAllowedResourcesRequest", + "RestrictAllowedResourcesResponse", + "DeleteWorkloadRequest", + "GetWorkloadRequest", + "AnalyzeWorkloadMoveRequest", + "AnalyzeWorkloadMoveResponse", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "Workload", + "CreateWorkloadOperationMetadata", + }, +) + + +class CreateWorkloadRequest(proto.Message): + r"""Request for creating a workload. + + Attributes: + parent (str): + Required. The resource name of the new Workload's parent. + Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + workload (google.cloud.assuredworkloads_v1beta1.types.Workload): + Required. Assured Workload to create + external_id (str): + Optional. A identifier associated with the + workload and underlying projects which allows + for the break down of billing costs for a + workload. The value provided for the identifier + will add a label to the workload and contained + projects with the identifier as the value. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + workload: "Workload" = proto.Field( + proto.MESSAGE, + number=2, + message="Workload", + ) + external_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateWorkloadRequest(proto.Message): + r"""Request for Updating a workload. + + Attributes: + workload (google.cloud.assuredworkloads_v1beta1.types.Workload): + Required. The workload to update. The workload's ``name`` + field is used to identify the workload to be updated. + Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. + """ + + workload: "Workload" = proto.Field( + proto.MESSAGE, + number=1, + message="Workload", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class RestrictAllowedResourcesRequest(proto.Message): + r"""Request for restricting list of available resources in + Workload environment. + + Attributes: + name (str): + Required. The resource name of the Workload. This is the + workloads's relative path in the API, formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + restriction_type (google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest.RestrictionType): + Required. The type of restriction for using + gcp products in the Workload environment. + """ + + class RestrictionType(proto.Enum): + r"""The type of restriction. + + Values: + RESTRICTION_TYPE_UNSPECIFIED (0): + Unknown restriction type. + ALLOW_ALL_GCP_RESOURCES (1): + Allow the use all of all gcp products, + irrespective of the compliance posture. This + effectively removes gcp.restrictServiceUsage + OrgPolicy on the AssuredWorkloads Folder. 
+ ALLOW_COMPLIANT_RESOURCES (2): + Based on Workload's compliance regime, + allowed list changes. See - + https://cloud.google.com/assured-workloads/docs/supported-products + for the list of supported resources. + """ + RESTRICTION_TYPE_UNSPECIFIED = 0 + ALLOW_ALL_GCP_RESOURCES = 1 + ALLOW_COMPLIANT_RESOURCES = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + restriction_type: RestrictionType = proto.Field( + proto.ENUM, + number=2, + enum=RestrictionType, + ) + + +class RestrictAllowedResourcesResponse(proto.Message): + r"""Response for restricting the list of allowed resources.""" + + +class DeleteWorkloadRequest(proto.Message): + r"""Request for deleting a Workload. + + Attributes: + name (str): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + etag (str): + Optional. The etag of the workload. + If this is provided, it must match the server's + etag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetWorkloadRequest(proto.Message): + r"""Request for fetching a workload. + + Attributes: + name (str): + Required. The resource name of the Workload to fetch. This + is the workloads's relative path in the API, formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AnalyzeWorkloadMoveRequest(proto.Message): + r"""A request to analyze a hypothetical move of a source project + or project-based workload to a target (destination) folder-based + workload. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. 
+ + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source (str): + The source type is a project-based workload. Specify the + workloads's relative resource name, formatted as: + "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}" + For example: + "organizations/123/locations/us-east1/workloads/assured-workload-1". + + This field is a member of `oneof`_ ``projectOrWorkloadResource``. + project (str): + The source type is a project. Specify the project's relative + resource name, formatted as either a project number or a + project ID: "projects/{PROJECT_NUMBER}" or + "projects/{PROJECT_ID}" For example: "projects/951040570662" + when specifying a project number, or + "projects/my-project-123" when specifying a project ID. + + This field is a member of `oneof`_ ``projectOrWorkloadResource``. + target (str): + Required. The resource ID of the folder-based destination + workload. This workload is where the source project will + hypothetically be moved to. Specify the workload's relative + resource name, formatted as: + "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}" + For example: + "organizations/123/locations/us-east1/workloads/assured-workload-2". + """ + + source: str = proto.Field( + proto.STRING, + number=1, + oneof="projectOrWorkloadResource", + ) + project: str = proto.Field( + proto.STRING, + number=3, + oneof="projectOrWorkloadResource", + ) + target: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyzeWorkloadMoveResponse(proto.Message): + r"""A response that includes the analysis of the hypothetical + resource move. + + Attributes: + blockers (MutableSequence[str]): + A list of blockers that should be addressed + before moving the source project or + project-based workload to the destination + folder-based workload. 
+ """ + + blockers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class ListWorkloadsRequest(proto.Message): + r"""Request for fetching workloads in an organization. + + Attributes: + parent (str): + Required. Parent Resource to list workloads from. Must be of + the form ``organizations/{org_id}/locations/{location}``. + page_size (int): + Page size. + page_token (str): + Page token returned from previous request. + Page token contains context from previous + request. Page token needs to be passed in the + second and following requests. + filter (str): + A custom filter for filtering by properties + of a workload. At this time, only filtering by + labels is supported. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListWorkloadsResponse(proto.Message): + r"""Response of ListWorkloads endpoint. + + Attributes: + workloads (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload]): + List of Workloads under a given parent. + next_page_token (str): + The next page token. Return empty if reached + the last page. + """ + + @property + def raw_page(self): + return self + + workloads: MutableSequence["Workload"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workload", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Workload(proto.Message): + r"""An Workload object for managing highly regulated workloads of + cloud customers. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optional. The resource name of the workload. + Format: + + organizations/{organization}/locations/{location}/workloads/{workload} + + Read-only. + display_name (str): + Required. The user-assigned display name of + the Workload. When present it must be between 4 + to 30 characters. Allowed characters are: + lowercase and uppercase letters, numbers, + hyphen, and spaces. + + Example: My Workload + resources (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo]): + Output only. The resources associated with + this workload. These resources will be created + when creating the workload. If any of the + projects already exist, the workload creation + will fail. Always read only. + compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime): + Required. Immutable. Compliance Regime + associated with this workload. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Immutable. The Workload creation + timestamp. + billing_account (str): + Output only. The billing account used for the resources + which are direct children of workload. This billing account + is initially associated with the resources created as part + of Workload creation. After the initial creation of these + resources, the customer can change the assigned billing + account. The resource name has the form + ``billingAccounts/{billing_account_id}``. For example, + ``billingAccounts/012345-567890-ABCDEF``. + il4_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.IL4Settings): + Input only. Immutable. Settings specific to + resources needed for IL4. + + This field is a member of `oneof`_ ``compliance_regime_settings``. + cjis_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.CJISSettings): + Input only. Immutable. Settings specific to + resources needed for CJIS. 
+ + This field is a member of `oneof`_ ``compliance_regime_settings``. + fedramp_high_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampHighSettings): + Input only. Immutable. Settings specific to + resources needed for FedRAMP High. + + This field is a member of `oneof`_ ``compliance_regime_settings``. + fedramp_moderate_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampModerateSettings): + Input only. Immutable. Settings specific to + resources needed for FedRAMP Moderate. + + This field is a member of `oneof`_ ``compliance_regime_settings``. + etag (str): + Optional. ETag of the workload, it is + calculated on the basis of the Workload + contents. It will be used in Update & Delete + operations. + labels (MutableMapping[str, str]): + Optional. Labels applied to the workload. + provisioned_resources_parent (str): + Input only. The parent resource for the resources managed by + this Assured Workload. May be either empty or a folder + resource which is a child of the Workload parent. If not + specified all resources are created under the parent + organization. Format: folders/{folder_id} + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Input only. Settings used to create a CMEK crypto key. When + set, a project with a KMS CMEK key is provisioned. This + field is deprecated as of Feb 28, 2022. In order to create a + Keyring, callers should specify, ENCRYPTION_KEYS_PROJECT or + KEYRING in ResourceSettings.resource_type field. + resource_settings (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]): + Input only. Resource properties that are used + to customize workload resources. These + properties (such as custom project id) will be + used to create workload resources if possible. + This field is optional. + kaj_enrollment_state (google.cloud.assuredworkloads_v1beta1.types.Workload.KajEnrollmentState): + Output only. 
Represents the KAJ enrollment + state of the given workload. + enable_sovereign_controls (bool): + Optional. Indicates the sovereignty status of + the given workload. Currently meant to be used + by Europe/Canada customers. + saa_enrollment_response (google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse): + Output only. Represents the SAA enrollment + response of the given workload. SAA enrollment + response is queried during GetWorkload call. In + failure cases, user friendly error message is + shown in SAA details page. + compliant_but_disallowed_services (MutableSequence[str]): + Output only. Urls for services which are + compliant for this Assured Workload, but which + are currently disallowed by the + ResourceUsageRestriction org policy. Invoke + RestrictAllowedResources endpoint to allow your + project developers to use these services in + their environment.". + """ + + class ComplianceRegime(proto.Enum): + r"""Supported Compliance Regimes. + + Values: + COMPLIANCE_REGIME_UNSPECIFIED (0): + Unknown compliance regime. + IL4 (1): + Information protection as per DoD IL4 + requirements. + CJIS (2): + Criminal Justice Information Services (CJIS) + Security policies. 
+ FEDRAMP_HIGH (3): + FedRAMP High data protection controls + FEDRAMP_MODERATE (4): + FedRAMP Moderate data protection controls + US_REGIONAL_ACCESS (5): + Assured Workloads For US Regions data + protection controls + HIPAA (6): + Health Insurance Portability and + Accountability Act controls + HITRUST (7): + Health Information Trust Alliance controls + EU_REGIONS_AND_SUPPORT (8): + Assured Workloads For EU Regions and Support + controls + CA_REGIONS_AND_SUPPORT (9): + Assured Workloads For Canada Regions and + Support controls + ITAR (10): + International Traffic in Arms Regulations + AU_REGIONS_AND_US_SUPPORT (11): + Assured Workloads for Australia Regions and + Support controls + """ + COMPLIANCE_REGIME_UNSPECIFIED = 0 + IL4 = 1 + CJIS = 2 + FEDRAMP_HIGH = 3 + FEDRAMP_MODERATE = 4 + US_REGIONAL_ACCESS = 5 + HIPAA = 6 + HITRUST = 7 + EU_REGIONS_AND_SUPPORT = 8 + CA_REGIONS_AND_SUPPORT = 9 + ITAR = 10 + AU_REGIONS_AND_US_SUPPORT = 11 + + class KajEnrollmentState(proto.Enum): + r"""Key Access Justifications(KAJ) Enrollment State. + + Values: + KAJ_ENROLLMENT_STATE_UNSPECIFIED (0): + Default State for KAJ Enrollment. + KAJ_ENROLLMENT_STATE_PENDING (1): + Pending State for KAJ Enrollment. + KAJ_ENROLLMENT_STATE_COMPLETE (2): + Complete State for KAJ Enrollment. + """ + KAJ_ENROLLMENT_STATE_UNSPECIFIED = 0 + KAJ_ENROLLMENT_STATE_PENDING = 1 + KAJ_ENROLLMENT_STATE_COMPLETE = 2 + + class ResourceInfo(proto.Message): + r"""Represent the resources that are children of this Workload. + + Attributes: + resource_id (int): + Resource identifier. For a project this represents + project_number. + resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType): + Indicates the type of resource. + """ + + class ResourceType(proto.Enum): + r"""The type of resource. + + Values: + RESOURCE_TYPE_UNSPECIFIED (0): + Unknown resource type. + CONSUMER_PROJECT (1): + Deprecated. 
Existing workloads will continue + to support this, but new CreateWorkloadRequests + should not specify this as an input value. + CONSUMER_FOLDER (4): + Consumer Folder. + ENCRYPTION_KEYS_PROJECT (2): + Consumer project containing encryption keys. + KEYRING (3): + Keyring resource that hosts encryption keys. + """ + RESOURCE_TYPE_UNSPECIFIED = 0 + CONSUMER_PROJECT = 1 + CONSUMER_FOLDER = 4 + ENCRYPTION_KEYS_PROJECT = 2 + KEYRING = 3 + + resource_id: int = proto.Field( + proto.INT64, + number=1, + ) + resource_type: "Workload.ResourceInfo.ResourceType" = proto.Field( + proto.ENUM, + number=2, + enum="Workload.ResourceInfo.ResourceType", + ) + + class KMSSettings(proto.Message): + r"""Settings specific to the Key Management Service. + + Attributes: + next_rotation_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Input only. Immutable. The time at + which the Key Management Service will + automatically create a new version of the crypto + key and mark it as the primary. + rotation_period (google.protobuf.duration_pb2.Duration): + Required. Input only. Immutable. [next_rotation_time] will + be advanced by this period when the Key Management Service + automatically rotates a key. Must be at least 24 hours and + at most 876,000 hours. + """ + + next_rotation_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + rotation_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + class IL4Settings(proto.Message): + r"""Settings specific to resources needed for IL4. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Input only. Immutable. Settings used to + create a CMEK crypto key. 
+ """ + + kms_settings: "Workload.KMSSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class CJISSettings(proto.Message): + r"""Settings specific to resources needed for CJIS. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Input only. Immutable. Settings used to + create a CMEK crypto key. + """ + + kms_settings: "Workload.KMSSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class FedrampHighSettings(proto.Message): + r"""Settings specific to resources needed for FedRAMP High. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Input only. Immutable. Settings used to + create a CMEK crypto key. + """ + + kms_settings: "Workload.KMSSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class FedrampModerateSettings(proto.Message): + r"""Settings specific to resources needed for FedRAMP Moderate. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Input only. Immutable. Settings used to + create a CMEK crypto key. + """ + + kms_settings: "Workload.KMSSettings" = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class ResourceSettings(proto.Message): + r"""Represent the custom settings for the resources to be + created. + + Attributes: + resource_id (str): + Resource identifier. For a project this represents + project_id. If the project is already taken, the workload + creation will fail. For KeyRing, this represents the + keyring_id. For a folder, don't set this value as folder_id + is assigned by Google. + resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType): + Indicates the type of resource. 
This field should be + specified to correspond the id to the right project type + (CONSUMER_PROJECT or ENCRYPTION_KEYS_PROJECT) + display_name (str): + User-assigned resource display name. + If not empty it will be used to create a + resource with the specified name. + """ + + resource_id: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: "Workload.ResourceInfo.ResourceType" = proto.Field( + proto.ENUM, + number=2, + enum="Workload.ResourceInfo.ResourceType", + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + + class SaaEnrollmentResponse(proto.Message): + r"""Signed Access Approvals (SAA) enrollment response. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + setup_status (google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse.SetupState): + Indicates SAA enrollment status of a given + workload. + + This field is a member of `oneof`_ ``_setup_status``. + setup_errors (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse.SetupError]): + Indicates SAA enrollment setup error if any. + """ + + class SetupState(proto.Enum): + r"""Setup state of SAA enrollment. + + Values: + SETUP_STATE_UNSPECIFIED (0): + Unspecified. + STATUS_PENDING (1): + SAA enrollment pending. + STATUS_COMPLETE (2): + SAA enrollment comopleted. + """ + SETUP_STATE_UNSPECIFIED = 0 + STATUS_PENDING = 1 + STATUS_COMPLETE = 2 + + class SetupError(proto.Enum): + r"""Setup error of SAA enrollment. + + Values: + SETUP_ERROR_UNSPECIFIED (0): + Unspecified. + ERROR_INVALID_BASE_SETUP (1): + Invalid states for all customers, to be + redirected to AA UI for additional details. + ERROR_MISSING_EXTERNAL_SIGNING_KEY (2): + Returned when there is not an EKM key + configured. + ERROR_NOT_ALL_SERVICES_ENROLLED (3): + Returned when there are no enrolled services + or the customer is enrolled in CAA only for a + subset of services. 
+ ERROR_SETUP_CHECK_FAILED (4): + Returned when exception was encountered + during evaluation of other criteria. + """ + SETUP_ERROR_UNSPECIFIED = 0 + ERROR_INVALID_BASE_SETUP = 1 + ERROR_MISSING_EXTERNAL_SIGNING_KEY = 2 + ERROR_NOT_ALL_SERVICES_ENROLLED = 3 + ERROR_SETUP_CHECK_FAILED = 4 + + setup_status: "Workload.SaaEnrollmentResponse.SetupState" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="Workload.SaaEnrollmentResponse.SetupState", + ) + setup_errors: MutableSequence[ + "Workload.SaaEnrollmentResponse.SetupError" + ] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Workload.SaaEnrollmentResponse.SetupError", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + resources: MutableSequence[ResourceInfo] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=ResourceInfo, + ) + compliance_regime: ComplianceRegime = proto.Field( + proto.ENUM, + number=4, + enum=ComplianceRegime, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + billing_account: str = proto.Field( + proto.STRING, + number=6, + ) + il4_settings: IL4Settings = proto.Field( + proto.MESSAGE, + number=7, + oneof="compliance_regime_settings", + message=IL4Settings, + ) + cjis_settings: CJISSettings = proto.Field( + proto.MESSAGE, + number=8, + oneof="compliance_regime_settings", + message=CJISSettings, + ) + fedramp_high_settings: FedrampHighSettings = proto.Field( + proto.MESSAGE, + number=11, + oneof="compliance_regime_settings", + message=FedrampHighSettings, + ) + fedramp_moderate_settings: FedrampModerateSettings = proto.Field( + proto.MESSAGE, + number=12, + oneof="compliance_regime_settings", + message=FedrampModerateSettings, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + 
provisioned_resources_parent: str = proto.Field( + proto.STRING, + number=13, + ) + kms_settings: KMSSettings = proto.Field( + proto.MESSAGE, + number=14, + message=KMSSettings, + ) + resource_settings: MutableSequence[ResourceSettings] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message=ResourceSettings, + ) + kaj_enrollment_state: KajEnrollmentState = proto.Field( + proto.ENUM, + number=17, + enum=KajEnrollmentState, + ) + enable_sovereign_controls: bool = proto.Field( + proto.BOOL, + number=18, + ) + saa_enrollment_response: SaaEnrollmentResponse = proto.Field( + proto.MESSAGE, + number=20, + message=SaaEnrollmentResponse, + ) + compliant_but_disallowed_services: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=24, + ) + + +class CreateWorkloadOperationMetadata(proto.Message): + r"""Operation metadata to give request details of CreateWorkload. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time when the operation was + created. + display_name (str): + Optional. The display name of the workload. + parent (str): + Optional. The parent of the workload. + compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime): + Optional. Compliance controls that should be + applied to the resources managed by the + workload. + resource_settings (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]): + Optional. Resource properties in the input + that are used for creating/customizing workload + resources. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + parent: str = proto.Field( + proto.STRING, + number=3, + ) + compliance_regime: "Workload.ComplianceRegime" = proto.Field( + proto.ENUM, + number=4, + enum="Workload.ComplianceRegime", + ) + resource_settings: MutableSequence[ + "Workload.ResourceSettings" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Workload.ResourceSettings", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_service.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_service.py new file mode 100644 index 000000000000..b4fe74fbcc9c --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.assuredworkloads.v1beta1", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py new file mode 100644 index 000000000000..0fe40b94c45a --- /dev/null +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py @@ -0,0 +1,650 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.assuredworkloads.v1beta1", + manifest={ + "CreateWorkloadRequest", + "UpdateWorkloadRequest", + "DeleteWorkloadRequest", + "GetWorkloadRequest", + "ListWorkloadsRequest", + "ListWorkloadsResponse", + "Workload", + "CreateWorkloadOperationMetadata", + }, +) + + +class CreateWorkloadRequest(proto.Message): + r"""Request for creating a workload. + + Attributes: + parent (str): + Required. The resource name of the new Workload's parent. 
+ Must be of the form + ``organizations/{org_id}/locations/{location_id}``. + workload (google.cloud.assuredworkloads_v1beta1.types.Workload): + Required. Assured Workload to create + external_id (str): + Optional. A identifier associated with the + workload and underlying projects which allows + for the break down of billing costs for a + workload. The value provided for the identifier + will add a label to the workload and contained + projects with the identifier as the value. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + workload = proto.Field( + proto.MESSAGE, + number=2, + message="Workload", + ) + external_id = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateWorkloadRequest(proto.Message): + r"""Request for Updating a workload. + + Attributes: + workload (google.cloud.assuredworkloads_v1beta1.types.Workload): + Required. The workload to update. The workload’s ``name`` + field is used to identify the workload to be updated. + Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be updated. + """ + + workload = proto.Field( + proto.MESSAGE, + number=1, + message="Workload", + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteWorkloadRequest(proto.Message): + r"""Request for deleting a Workload. + + Attributes: + name (str): + Required. The ``name`` field is used to identify the + workload. Format: + organizations/{org_id}/locations/{location_id}/workloads/{workload_id} + etag (str): + Optional. The etag of the workload. + If this is provided, it must match the server's + etag. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + etag = proto.Field( + proto.STRING, + number=2, + ) + + +class GetWorkloadRequest(proto.Message): + r"""Request for fetching a workload. + + Attributes: + name (str): + Required. 
The resource name of the Workload to fetch. This + is the workloads's relative path in the API, formatted as + "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". + For example, + "organizations/123/locations/us-east1/workloads/assured-workload-1". + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListWorkloadsRequest(proto.Message): + r"""Request for fetching workloads in an organization. + + Attributes: + parent (str): + Required. Parent Resource to list workloads from. Must be of + the form ``organizations/{org_id}/locations/{location}``. + page_size (int): + Page size. + page_token (str): + Page token returned from previous request. + Page token contains context from previous + request. Page token needs to be passed in the + second and following requests. + filter (str): + A custom filter for filtering by properties + of a workload. At this time, only filtering by + labels is supported. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + + +class ListWorkloadsResponse(proto.Message): + r"""Response of ListWorkloads endpoint. + + Attributes: + workloads (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload]): + List of Workloads under a given parent. + next_page_token (str): + The next page token. Return empty if reached + the last page. + """ + + @property + def raw_page(self): + return self + + workloads = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Workload", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class Workload(proto.Message): + r"""An Workload object for managing highly regulated workloads of + cloud customers. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optional. The resource name of the workload. + Format: + organizations/{organization}/locations/{location}/workloads/{workload} + Read-only. + display_name (str): + Required. The user-assigned display name of + the Workload. When present it must be between 4 + to 30 characters. Allowed characters are: + lowercase and uppercase letters, numbers, + hyphen, and spaces. + + Example: My Workload + resources (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo]): + Output only. The resources associated with + this workload. These resources will be created + when creating the workload. If any of the + projects already exist, the workload creation + will fail. Always read only. + compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime): + Required. Immutable. Compliance Regime + associated with this workload. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Immutable. The Workload creation + timestamp. + billing_account (str): + Input only. The billing account used for the resources which + are direct children of workload. This billing account is + initially associated with the resources created as part of + Workload creation. After the initial creation of these + resources, the customer can change the assigned billing + account. The resource name has the form + ``billingAccounts/{billing_account_id}``. For example, + ``billingAccounts/012345-567890-ABCDEF``. + il4_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.IL4Settings): + Required. Input only. Immutable. Settings + specific to resources needed for IL4. + + This field is a member of `oneof`_ ``compliance_regime_settings``. 
+ cjis_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.CJISSettings): + Required. Input only. Immutable. Settings + specific to resources needed for CJIS. + + This field is a member of `oneof`_ ``compliance_regime_settings``. + fedramp_high_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampHighSettings): + Required. Input only. Immutable. Settings + specific to resources needed for FedRAMP High. + + This field is a member of `oneof`_ ``compliance_regime_settings``. + fedramp_moderate_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampModerateSettings): + Required. Input only. Immutable. Settings + specific to resources needed for FedRAMP + Moderate. + + This field is a member of `oneof`_ ``compliance_regime_settings``. + etag (str): + Optional. ETag of the workload, it is + calculated on the basis of the Workload + contents. It will be used in Update & Delete + operations. + labels (Mapping[str, str]): + Optional. Labels applied to the workload. + provisioned_resources_parent (str): + Input only. The parent resource for the resources managed by + this Assured Workload. May be either empty or a folder + resource which is a child of the Workload parent. If not + specified all resources are created under the parent + organization. Format: folders/{folder_id} + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Input only. Settings used to create a CMEK + crypto key. When set a project with a KMS CMEK + key is provisioned. This field is mandatory for + a subset of Compliance Regimes. + resource_settings (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]): + Input only. Resource properties that are used + to customize workload resources. These + properties (such as custom project id) will be + used to create workload resources if possible. + This field is optional. 
+ kaj_enrollment_state (google.cloud.assuredworkloads_v1beta1.types.Workload.KajEnrollmentState): + Output only. Represents the KAJ enrollment + state of the given workload. + enable_sovereign_controls (bool): + Optional. Indicates the sovereignty status of + the given workload. Currently meant to be used + by Europe/Canada customers. + saa_enrollment_response (google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse): + Output only. Represents the SAA enrollment + response of the given workload. SAA enrollment + response is queried during GetWorkload call. In + failure cases, user friendly error message is + shown in SAA details page. + """ + + class ComplianceRegime(proto.Enum): + r"""Supported Compliance Regimes.""" + COMPLIANCE_REGIME_UNSPECIFIED = 0 + IL4 = 1 + CJIS = 2 + FEDRAMP_HIGH = 3 + FEDRAMP_MODERATE = 4 + US_REGIONAL_ACCESS = 5 + HIPAA = 6 + HITRUST = 7 + EU_REGIONS_AND_SUPPORT = 8 + CA_REGIONS_AND_SUPPORT = 9 + + class KajEnrollmentState(proto.Enum): + r"""Key Access Justifications(KAJ) Enrollment State.""" + KAJ_ENROLLMENT_STATE_UNSPECIFIED = 0 + KAJ_ENROLLMENT_STATE_PENDING = 1 + KAJ_ENROLLMENT_STATE_COMPLETE = 2 + + class ResourceInfo(proto.Message): + r"""Represent the resources that are children of this Workload. + + Attributes: + resource_id (int): + Resource identifier. For a project this represents + project_number. + resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType): + Indicates the type of resource. + """ + + class ResourceType(proto.Enum): + r"""The type of resource.""" + RESOURCE_TYPE_UNSPECIFIED = 0 + CONSUMER_PROJECT = 1 + CONSUMER_FOLDER = 4 + ENCRYPTION_KEYS_PROJECT = 2 + KEYRING = 3 + + resource_id = proto.Field( + proto.INT64, + number=1, + ) + resource_type = proto.Field( + proto.ENUM, + number=2, + enum="Workload.ResourceInfo.ResourceType", + ) + + class KMSSettings(proto.Message): + r"""Settings specific to the Key Management Service. 
+ + Attributes: + next_rotation_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Input only. Immutable. The time at + which the Key Management Service will + automatically create a new version of the crypto + key and mark it as the primary. + rotation_period (google.protobuf.duration_pb2.Duration): + Required. Input only. Immutable. [next_rotation_time] will + be advanced by this period when the Key Management Service + automatically rotates a key. Must be at least 24 hours and + at most 876,000 hours. + """ + + next_rotation_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + rotation_period = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + class IL4Settings(proto.Message): + r"""Settings specific to resources needed for IL4. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Required. Input only. Immutable. Settings + used to create a CMEK crypto key. + """ + + kms_settings = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class CJISSettings(proto.Message): + r"""Settings specific to resources needed for CJIS. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Required. Input only. Immutable. Settings + used to create a CMEK crypto key. + """ + + kms_settings = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class FedrampHighSettings(proto.Message): + r"""Settings specific to resources needed for FedRAMP High. + + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Required. Input only. Immutable. Settings + used to create a CMEK crypto key. + """ + + kms_settings = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class FedrampModerateSettings(proto.Message): + r"""Settings specific to resources needed for FedRAMP Moderate. 
+ + Attributes: + kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings): + Required. Input only. Immutable. Settings + used to create a CMEK crypto key. + """ + + kms_settings = proto.Field( + proto.MESSAGE, + number=1, + message="Workload.KMSSettings", + ) + + class ResourceSettings(proto.Message): + r"""Represent the custom settings for the resources to be + created. + + Attributes: + resource_id (str): + Resource identifier. For a project this represents + project_id. If the project is already taken, the workload + creation will fail. + resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType): + Indicates the type of resource. This field should be + specified to correspond the id to the right project type + (CONSUMER_PROJECT or ENCRYPTION_KEYS_PROJECT) + display_name (str): + User-assigned resource display name. + If not empty it will be used to create a + resource with the specified name. + """ + + resource_id = proto.Field( + proto.STRING, + number=1, + ) + resource_type = proto.Field( + proto.ENUM, + number=2, + enum="Workload.ResourceInfo.ResourceType", + ) + display_name = proto.Field( + proto.STRING, + number=3, + ) + + class SaaEnrollmentResponse(proto.Message): + r"""Signed Access Approvals (SAA) enrollment response. + + Attributes: + setup_status (google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse.SetupState): + Indicates SAA enrollment status of a given + workload. + + This field is a member of `oneof`_ ``_setup_status``. + setup_errors (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse.SetupError]): + Indicates SAA enrollment setup error if any. 
+ """ + + class SetupState(proto.Enum): + r"""Setup state of SAA enrollment.""" + SETUP_STATE_UNSPECIFIED = 0 + STATUS_PENDING = 1 + STATUS_COMPLETE = 2 + + class SetupError(proto.Enum): + r"""Setup error of SAA enrollment.""" + SETUP_ERROR_UNSPECIFIED = 0 + ERROR_INVALID_BASE_SETUP = 1 + ERROR_MISSING_EXTERNAL_SIGNING_KEY = 2 + ERROR_NOT_ALL_SERVICES_ENROLLED = 3 + ERROR_SETUP_CHECK_FAILED = 4 + + setup_status = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="Workload.SaaEnrollmentResponse.SetupState", + ) + setup_errors = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Workload.SaaEnrollmentResponse.SetupError", + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + resources = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=ResourceInfo, + ) + compliance_regime = proto.Field( + proto.ENUM, + number=4, + enum=ComplianceRegime, + ) + create_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + billing_account = proto.Field( + proto.STRING, + number=6, + ) + il4_settings = proto.Field( + proto.MESSAGE, + number=7, + oneof="compliance_regime_settings", + message=IL4Settings, + ) + cjis_settings = proto.Field( + proto.MESSAGE, + number=8, + oneof="compliance_regime_settings", + message=CJISSettings, + ) + fedramp_high_settings = proto.Field( + proto.MESSAGE, + number=11, + oneof="compliance_regime_settings", + message=FedrampHighSettings, + ) + fedramp_moderate_settings = proto.Field( + proto.MESSAGE, + number=12, + oneof="compliance_regime_settings", + message=FedrampModerateSettings, + ) + etag = proto.Field( + proto.STRING, + number=9, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + provisioned_resources_parent = proto.Field( + proto.STRING, + number=13, + ) + kms_settings = proto.Field( + proto.MESSAGE, + number=14, + message=KMSSettings, + ) + resource_settings = proto.RepeatedField( + 
proto.MESSAGE, + number=15, + message=ResourceSettings, + ) + kaj_enrollment_state = proto.Field( + proto.ENUM, + number=17, + enum=KajEnrollmentState, + ) + enable_sovereign_controls = proto.Field( + proto.BOOL, + number=18, + ) + saa_enrollment_response = proto.Field( + proto.MESSAGE, + number=20, + message=SaaEnrollmentResponse, + ) + + +class CreateWorkloadOperationMetadata(proto.Message): + r"""Operation metadata to give request details of CreateWorkload. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time when the operation was + created. + display_name (str): + Optional. The display name of the workload. + parent (str): + Optional. The parent of the workload. + compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime): + Optional. Compliance controls that should be + applied to the resources managed by the + workload. + resource_settings (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]): + Optional. Resource properties in the input + that are used for creating/customizing workload + resources. 
+ """ + + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + parent = proto.Field( + proto.STRING, + number=3, + ) + compliance_regime = proto.Field( + proto.ENUM, + number=4, + enum="Workload.ComplianceRegime", + ) + resource_settings = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Workload.ResourceSettings", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-assured-workloads/mypy.ini b/packages/google-cloud-assured-workloads/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-assured-workloads/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-assured-workloads/noxfile.py b/packages/google-cloud-assured-workloads/noxfile.py new file mode 100644 index 000000000000..9a2acd8b6787 --- /dev/null +++ b/packages/google-cloud-assured-workloads/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + 
session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. 
+ if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-assured-workloads/renovate.json b/packages/google-cloud-assured-workloads/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-assured-workloads/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-assured-workloads/scripts/decrypt-secrets.sh b/packages/google-cloud-assured-workloads/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-assured-workloads/scripts/fixup_assuredworkloads_v1_keywords.py b/packages/google-cloud-assured-workloads/scripts/fixup_assuredworkloads_v1_keywords.py new file mode 100644 index 000000000000..9b7bfa9aa390 --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/fixup_assuredworkloads_v1_keywords.py @@ -0,0 +1,184 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class assuredworkloadsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'acknowledge_violation': ('name', 'comment', 'non_compliant_org_policy', ), + 'create_workload': ('parent', 'workload', 'external_id', ), + 'delete_workload': ('name', 'etag', ), + 'get_violation': ('name', ), + 'get_workload': ('name', ), + 'list_violations': ('parent', 'interval', 'page_size', 'page_token', 'filter', ), + 'list_workloads': ('parent', 'page_size', 'page_token', 'filter', ), + 'restrict_allowed_resources': ('name', 'restriction_type', ), + 'update_workload': ('workload', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=assuredworkloadsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the assuredworkloads client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-assured-workloads/scripts/fixup_assuredworkloads_v1beta1_keywords.py b/packages/google-cloud-assured-workloads/scripts/fixup_assuredworkloads_v1beta1_keywords.py new file mode 100644 index 000000000000..b91aed313e5a --- /dev/null +++ 
b/packages/google-cloud-assured-workloads/scripts/fixup_assuredworkloads_v1beta1_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class assuredworkloadsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_workload_move': ('target', 'source', 'project', ), + 'create_workload': ('parent', 'workload', 'external_id', ), + 'delete_workload': ('name', 'etag', ), + 'get_workload': ('name', ), + 'list_workloads': ('parent', 'page_size', 'page_token', 'filter', ), + 'restrict_allowed_resources': ('name', 'restriction_type', ), + 'update_workload': ('workload', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too 
convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=assuredworkloadsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the assuredworkloads client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-assured-workloads/scripts/readme-gen/readme_gen.py 
b/packages/google-cloud-assured-workloads/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. 
_Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. 
code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-assured-workloads/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. 
_Homebrew: http://brew.sh diff --git a/packages/google-cloud-assured-workloads/setup.cfg b/packages/google-cloud-assured-workloads/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-assured-workloads/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-assured-workloads/setup.py b/packages/google-cloud-assured-workloads/setup.py new file mode 100644 index 000000000000..d1a1f80f2906 --- /dev/null +++ b/packages/google-cloud-assured-workloads/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-assured-workloads" + + +description = "Google Cloud Assured Workloads API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/assuredworkloads/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + 
"Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-assured-workloads/testing/.gitignore b/packages/google-cloud-assured-workloads/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-assured-workloads/testing/constraints-3.10.txt b/packages/google-cloud-assured-workloads/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-assured-workloads/testing/constraints-3.11.txt b/packages/google-cloud-assured-workloads/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-assured-workloads/testing/constraints-3.12.txt b/packages/google-cloud-assured-workloads/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-assured-workloads/testing/constraints-3.7.txt b/packages/google-cloud-assured-workloads/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-assured-workloads/testing/constraints-3.8.txt b/packages/google-cloud-assured-workloads/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-assured-workloads/testing/constraints-3.9.txt b/packages/google-cloud-assured-workloads/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-assured-workloads/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-assured-workloads/tests/__init__.py b/packages/google-cloud-assured-workloads/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-assured-workloads/tests/unit/__init__.py b/packages/google-cloud-assured-workloads/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/__init__.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/__init__.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py new file mode 100644 index 000000000000..96e5da71e06c --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py @@ -0,0 +1,6224 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.assuredworkloads_v1.services.assured_workloads_service import ( + AssuredWorkloadsServiceAsyncClient, + AssuredWorkloadsServiceClient, + pagers, + transports, +) +from google.cloud.assuredworkloads_v1.types import assuredworkloads + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssuredWorkloadsServiceClient, "grpc"), + (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), + (AssuredWorkloadsServiceClient, "rest"), + ], +) +def test_assured_workloads_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "assuredworkloads.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://assuredworkloads.googleapis.com" + ) + + 
+@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), + (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AssuredWorkloadsServiceRestTransport, "rest"), + ], +) +def test_assured_workloads_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssuredWorkloadsServiceClient, "grpc"), + (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), + (AssuredWorkloadsServiceClient, "rest"), + ], +) +def test_assured_workloads_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "assuredworkloads.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://assuredworkloads.googleapis.com" + ) + + +def test_assured_workloads_service_client_get_transport_class(): + transport = AssuredWorkloadsServiceClient.get_transport_class() + available_transports = [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceRestTransport, + ] + assert transport in available_transports + + transport = AssuredWorkloadsServiceClient.get_transport_class("grpc") + assert transport == transports.AssuredWorkloadsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + AssuredWorkloadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceClient), +) +@mock.patch.object( + AssuredWorkloadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceAsyncClient), +) +def test_assured_workloads_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AssuredWorkloadsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AssuredWorkloadsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + 
"true", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + "true", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AssuredWorkloadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceClient), +) +@mock.patch.object( + AssuredWorkloadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_assured_workloads_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient] +) +@mock.patch.object( + AssuredWorkloadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceClient), +) +@mock.patch.object( + AssuredWorkloadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceAsyncClient), +) +def test_assured_workloads_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + ), + ], +) +def test_assured_workloads_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_assured_workloads_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_assured_workloads_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AssuredWorkloadsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_assured_workloads_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "assuredworkloads.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="assuredworkloads.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.CreateWorkloadRequest, + dict, + ], +) +def test_create_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an 
empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.CreateWorkloadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + client.create_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.CreateWorkloadRequest() + + +@pytest.mark.asyncio +async def test_create_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.CreateWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.CreateWorkloadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workload_async_from_dict(): + await test_create_workload_async(request_type=dict) + + +def test_create_workload_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.CreateWorkloadRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workload_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.CreateWorkloadRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workload( + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + + +def test_create_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_workload( + assuredworkloads.CreateWorkloadRequest(), + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_workload( + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_workload( + assuredworkloads.CreateWorkloadRequest(), + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.UpdateWorkloadRequest, + dict, + ], +) +def test_update_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, + ) + response = client.update_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.UpdateWorkloadRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS + + +def test_update_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + client.update_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.UpdateWorkloadRequest() + + +@pytest.mark.asyncio +async def test_update_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.UpdateWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, + ) + ) + response = await client.update_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.UpdateWorkloadRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS + + +@pytest.mark.asyncio +async def test_update_workload_async_from_dict(): + await test_update_workload_async(request_type=dict) + + +def test_update_workload_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.UpdateWorkloadRequest() + + request.workload.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + call.return_value = assuredworkloads.Workload() + client.update_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workload.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workload_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.UpdateWorkloadRequest() + + request.workload.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload() + ) + await client.update_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "workload.name=name_value", + ) in kw["metadata"] + + +def test_update_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workload( + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workload( + assuredworkloads.UpdateWorkloadRequest(), + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_workload( + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_workload( + assuredworkloads.UpdateWorkloadRequest(), + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.RestrictAllowedResourcesRequest, + dict, + ], +) +def test_restrict_allowed_resources(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + response = client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) + + +def test_restrict_allowed_resources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + client.restrict_allowed_resources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest() + + +@pytest.mark.asyncio +async def test_restrict_allowed_resources_async( + transport: str = "grpc_asyncio", + request_type=assuredworkloads.RestrictAllowedResourcesRequest, +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.RestrictAllowedResourcesResponse() + ) + response = await client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) + + +@pytest.mark.asyncio +async def test_restrict_allowed_resources_async_from_dict(): + await test_restrict_allowed_resources_async(request_type=dict) + + +def test_restrict_allowed_resources_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.RestrictAllowedResourcesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restrict_allowed_resources_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.RestrictAllowedResourcesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.RestrictAllowedResourcesResponse() + ) + await client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.DeleteWorkloadRequest, + dict, + ], +) +def test_delete_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.DeleteWorkloadRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + client.delete_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.DeleteWorkloadRequest() + + +@pytest.mark.asyncio +async def test_delete_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.DeleteWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.DeleteWorkloadRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_workload_async_from_dict(): + await test_delete_workload_async(request_type=dict) + + +def test_delete_workload_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = assuredworkloads.DeleteWorkloadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + call.return_value = None + client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workload_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.DeleteWorkloadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workload( + assuredworkloads.DeleteWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_workload( + assuredworkloads.DeleteWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.GetWorkloadRequest, + dict, + ], +) +def test_get_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, + ) + response = client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetWorkloadRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS + + +def test_get_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + client.get_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetWorkloadRequest() + + +@pytest.mark.asyncio +async def test_get_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.GetWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, + ) + ) + response = await client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetWorkloadRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS + + +@pytest.mark.asyncio +async def test_get_workload_async_from_dict(): + await test_get_workload_async(request_type=dict) + + +def test_get_workload_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.GetWorkloadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + call.return_value = assuredworkloads.Workload() + client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workload_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.GetWorkloadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload() + ) + await client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workload( + assuredworkloads.GetWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_workload( + assuredworkloads.GetWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.ListWorkloadsRequest, + dict, + ], +) +def test_list_workloads(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_workloads(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListWorkloadsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkloadsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_workloads_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + client.list_workloads() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListWorkloadsRequest() + + +@pytest.mark.asyncio +async def test_list_workloads_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.ListWorkloadsRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_workloads(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListWorkloadsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkloadsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_workloads_async_from_dict(): + await test_list_workloads_async(request_type=dict) + + +def test_list_workloads_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = assuredworkloads.ListWorkloadsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + call.return_value = assuredworkloads.ListWorkloadsResponse() + client.list_workloads(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workloads_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.ListWorkloadsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListWorkloadsResponse() + ) + await client.list_workloads(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_workloads_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListWorkloadsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workloads( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_workloads_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workloads( + assuredworkloads.ListWorkloadsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_workloads_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListWorkloadsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListWorkloadsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_workloads( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_workloads_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_workloads( + assuredworkloads.ListWorkloadsRequest(), + parent="parent_value", + ) + + +def test_list_workloads_pager(transport_name: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_workloads(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assuredworkloads.Workload) for i in results) + + +def test_list_workloads_pages(transport_name: str = "grpc"): + client = AssuredWorkloadsServiceClient( + 
credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + pages = list(client.list_workloads(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_workloads_async_pager(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workloads), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_workloads( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, assuredworkloads.Workload) for i in responses) + + +@pytest.mark.asyncio +async def test_list_workloads_async_pages(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workloads), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_workloads(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.ListViolationsRequest, + dict, + ], +) +def test_list_violations(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListViolationsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_violations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListViolationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViolationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_violations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + client.list_violations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListViolationsRequest() + + +@pytest.mark.asyncio +async def test_list_violations_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.ListViolationsRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListViolationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_violations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListViolationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViolationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_violations_async_from_dict(): + await test_list_violations_async(request_type=dict) + + +def test_list_violations_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListViolationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_violations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_violations_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_violations( + assuredworkloads.ListViolationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_violations_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListViolationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListViolationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_violations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_violations_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_violations( + assuredworkloads.ListViolationsRequest(), + parent="parent_value", + ) + + +def test_list_violations_pager(transport_name: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + next_page_token="abc", + ), + assuredworkloads.ListViolationsResponse( + violations=[], + next_page_token="def", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_violations(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assuredworkloads.Violation) for i in results) + + +def test_list_violations_pages(transport_name: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_violations), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + next_page_token="abc", + ), + assuredworkloads.ListViolationsResponse( + violations=[], + next_page_token="def", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_violations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_violations_async_pager(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_violations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + next_page_token="abc", + ), + assuredworkloads.ListViolationsResponse( + violations=[], + next_page_token="def", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_violations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, assuredworkloads.Violation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_violations_async_pages(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_violations), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + next_page_token="abc", + ), + assuredworkloads.ListViolationsResponse( + violations=[], + next_page_token="def", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListViolationsResponse( + violations=[ + assuredworkloads.Violation(), + assuredworkloads.Violation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_violations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.GetViolationRequest, + dict, + ], +) +def test_get_violation(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_violation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = assuredworkloads.Violation( + name="name_value", + description="description_value", + category="category_value", + state=assuredworkloads.Violation.State.RESOLVED, + org_policy_constraint="org_policy_constraint_value", + audit_log_link="audit_log_link_value", + non_compliant_org_policy="non_compliant_org_policy_value", + acknowledged=True, + exception_audit_log_link="exception_audit_log_link_value", + ) + response = client.get_violation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetViolationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.Violation) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.category == "category_value" + assert response.state == assuredworkloads.Violation.State.RESOLVED + assert response.org_policy_constraint == "org_policy_constraint_value" + assert response.audit_log_link == "audit_log_link_value" + assert response.non_compliant_org_policy == "non_compliant_org_policy_value" + assert response.acknowledged is True + assert response.exception_audit_log_link == "exception_audit_log_link_value" + + +def test_get_violation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_violation), "__call__") as call: + client.get_violation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetViolationRequest() + + +@pytest.mark.asyncio +async def test_get_violation_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.GetViolationRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_violation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Violation( + name="name_value", + description="description_value", + category="category_value", + state=assuredworkloads.Violation.State.RESOLVED, + org_policy_constraint="org_policy_constraint_value", + audit_log_link="audit_log_link_value", + non_compliant_org_policy="non_compliant_org_policy_value", + acknowledged=True, + exception_audit_log_link="exception_audit_log_link_value", + ) + ) + response = await client.get_violation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetViolationRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Violation) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.category == "category_value" + assert response.state == assuredworkloads.Violation.State.RESOLVED + assert response.org_policy_constraint == "org_policy_constraint_value" + assert response.audit_log_link == "audit_log_link_value" + assert response.non_compliant_org_policy == "non_compliant_org_policy_value" + assert response.acknowledged is True + assert response.exception_audit_log_link == "exception_audit_log_link_value" + + +@pytest.mark.asyncio +async def test_get_violation_async_from_dict(): + await test_get_violation_async(request_type=dict) + + +def test_get_violation_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_violation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Violation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_violation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_violation_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_violation( + assuredworkloads.GetViolationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_violation_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_violation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Violation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Violation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_violation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_violation_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_violation( + assuredworkloads.GetViolationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.AcknowledgeViolationRequest, + dict, + ], +) +def test_acknowledge_violation(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_violation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.AcknowledgeViolationResponse() + response = client.acknowledge_violation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.AcknowledgeViolationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.AcknowledgeViolationResponse) + + +def test_acknowledge_violation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge_violation), "__call__" + ) as call: + client.acknowledge_violation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.AcknowledgeViolationRequest() + + +@pytest.mark.asyncio +async def test_acknowledge_violation_async( + transport: str = "grpc_asyncio", + request_type=assuredworkloads.AcknowledgeViolationRequest, +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.acknowledge_violation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.AcknowledgeViolationResponse() + ) + response = await client.acknowledge_violation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.AcknowledgeViolationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.AcknowledgeViolationResponse) + + +@pytest.mark.asyncio +async def test_acknowledge_violation_async_from_dict(): + await test_acknowledge_violation_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.CreateWorkloadRequest, + dict, + ], +) +def test_create_workload_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["workload"] = { + "name": "name_value", + "display_name": "display_name_value", + "resources": [{"resource_id": 1172, "resource_type": 1}], + "compliance_regime": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "billing_account": "billing_account_value", + "etag": "etag_value", + "labels": {}, + "provisioned_resources_parent": "provisioned_resources_parent_value", + "kms_settings": { + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + }, + "resource_settings": [ + { + "resource_id": "resource_id_value", + "resource_type": 1, + "display_name": "display_name_value", + } + ], + "kaj_enrollment_state": 1, + "enable_sovereign_controls": True, + "saa_enrollment_response": {"setup_status": 1, "setup_errors": [1]}, + 
"compliant_but_disallowed_services": [ + "compliant_but_disallowed_services_value1", + "compliant_but_disallowed_services_value2", + ], + "partner": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workload(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_workload_rest_required_fields( + request_type=assuredworkloads.CreateWorkloadRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workload._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("external_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workload_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workload._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("externalId",)) + & set( + ( + "parent", + "workload", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workload_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload" + ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "pre_create_workload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.CreateWorkloadRequest.pb( + 
assuredworkloads.CreateWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = assuredworkloads.CreateWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workload_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.CreateWorkloadRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["workload"] = { + "name": "name_value", + "display_name": "display_name_value", + "resources": [{"resource_id": 1172, "resource_type": 1}], + "compliance_regime": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "billing_account": "billing_account_value", + "etag": "etag_value", + "labels": {}, + "provisioned_resources_parent": "provisioned_resources_parent_value", + "kms_settings": { + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + }, + "resource_settings": [ + { + "resource_id": "resource_id_value", + "resource_type": 1, + "display_name": "display_name_value", + } + ], + "kaj_enrollment_state": 1, + "enable_sovereign_controls": True, + "saa_enrollment_response": {"setup_status": 1, "setup_errors": [1]}, + "compliant_but_disallowed_services": [ + "compliant_but_disallowed_services_value1", + 
"compliant_but_disallowed_services_value2", + ], + "partner": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workload(request) + + +def test_create_workload_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/workloads" + % client.transport._host, + args[1], + ) + + +def test_create_workload_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workload( + assuredworkloads.CreateWorkloadRequest(), + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + +def test_create_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.UpdateWorkloadRequest, + dict, + ], +) +def test_update_workload_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "workload": { + "name": "organizations/sample1/locations/sample2/workloads/sample3" + } + } + request_init["workload"] = { + "name": "organizations/sample1/locations/sample2/workloads/sample3", + "display_name": "display_name_value", + "resources": [{"resource_id": 1172, "resource_type": 1}], + "compliance_regime": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "billing_account": "billing_account_value", + "etag": "etag_value", + "labels": {}, + "provisioned_resources_parent": "provisioned_resources_parent_value", + "kms_settings": { + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + }, + "resource_settings": [ + { + "resource_id": "resource_id_value", + "resource_type": 1, + "display_name": "display_name_value", + } + ], + "kaj_enrollment_state": 1, + 
"enable_sovereign_controls": True, + "saa_enrollment_response": {"setup_status": 1, "setup_errors": [1]}, + "compliant_but_disallowed_services": [ + "compliant_but_disallowed_services_value1", + "compliant_but_disallowed_services_value2", + ], + "partner": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workload(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS + + +def test_update_workload_rest_required_fields( + request_type=assuredworkloads.UpdateWorkloadRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workload._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.Workload() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = assuredworkloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workload_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workload._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "workload", + "updateMask", + ) + 
) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workload_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "post_update_workload" + ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "pre_update_workload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.UpdateWorkloadRequest.pb( + assuredworkloads.UpdateWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = assuredworkloads.Workload.to_json( + assuredworkloads.Workload() + ) + + request = assuredworkloads.UpdateWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = assuredworkloads.Workload() + + client.update_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workload_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.UpdateWorkloadRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "workload": { + "name": 
"organizations/sample1/locations/sample2/workloads/sample3" + } + } + request_init["workload"] = { + "name": "organizations/sample1/locations/sample2/workloads/sample3", + "display_name": "display_name_value", + "resources": [{"resource_id": 1172, "resource_type": 1}], + "compliance_regime": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "billing_account": "billing_account_value", + "etag": "etag_value", + "labels": {}, + "provisioned_resources_parent": "provisioned_resources_parent_value", + "kms_settings": { + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + }, + "resource_settings": [ + { + "resource_id": "resource_id_value", + "resource_type": 1, + "display_name": "display_name_value", + } + ], + "kaj_enrollment_state": 1, + "enable_sovereign_controls": True, + "saa_enrollment_response": {"setup_status": 1, "setup_errors": [1]}, + "compliant_but_disallowed_services": [ + "compliant_but_disallowed_services_value1", + "compliant_but_disallowed_services_value2", + ], + "partner": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workload(request) + + +def test_update_workload_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = assuredworkloads.Workload() + + # get arguments that satisfy an http rule for this method + sample_request = { + "workload": { + "name": "organizations/sample1/locations/sample2/workloads/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{workload.name=organizations/*/locations/*/workloads/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workload_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_workload( + assuredworkloads.UpdateWorkloadRequest(), + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.RestrictAllowedResourcesRequest, + dict, + ], +) +def test_restrict_allowed_resources_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.RestrictAllowedResourcesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restrict_allowed_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) + + +def test_restrict_allowed_resources_rest_required_fields( + request_type=assuredworkloads.RestrictAllowedResourcesRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restrict_allowed_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restrict_allowed_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.RestrictAllowedResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restrict_allowed_resources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restrict_allowed_resources_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restrict_allowed_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "restrictionType", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restrict_allowed_resources_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_restrict_allowed_resources", + 
) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "pre_restrict_allowed_resources", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.RestrictAllowedResourcesRequest.pb( + assuredworkloads.RestrictAllowedResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + assuredworkloads.RestrictAllowedResourcesResponse.to_json( + assuredworkloads.RestrictAllowedResourcesResponse() + ) + ) + + request = assuredworkloads.RestrictAllowedResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + + client.restrict_allowed_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restrict_allowed_resources_rest_bad_request( + transport: str = "rest", + request_type=assuredworkloads.RestrictAllowedResourcesRequest, +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restrict_allowed_resources(request) + + +def test_restrict_allowed_resources_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.DeleteWorkloadRequest, + dict, + ], +) +def test_delete_workload_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workload(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_workload_rest_required_fields( + request_type=assuredworkloads.DeleteWorkloadRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workload._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workload_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workload._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workload_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "pre_delete_workload" + ) as pre: + pre.assert_not_called() + pb_message = assuredworkloads.DeleteWorkloadRequest.pb( + assuredworkloads.DeleteWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = assuredworkloads.DeleteWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_workload_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.DeleteWorkloadRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workload(request) + + +def test_delete_workload_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/workloads/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/workloads/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workload_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workload( + assuredworkloads.DeleteWorkloadRequest(), + name="name_value", + ) + + +def test_delete_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.GetWorkloadRequest, + dict, + ], +) +def test_get_workload_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workload(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS + + +def test_get_workload_rest_required_fields( + request_type=assuredworkloads.GetWorkloadRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AssuredWorkloadsServiceClient( 
+ credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.Workload() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = assuredworkloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workload_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workload._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workload_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "post_get_workload" + ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "pre_get_workload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.GetWorkloadRequest.pb( + assuredworkloads.GetWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = assuredworkloads.Workload.to_json( + assuredworkloads.Workload() + ) + + request = assuredworkloads.GetWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = assuredworkloads.Workload() + + client.get_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workload_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.GetWorkloadRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workload(request) + + +def test_get_workload_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.Workload() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/workloads/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/workloads/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workload_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_workload( + assuredworkloads.GetWorkloadRequest(), + name="name_value", + ) + + +def test_get_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.ListWorkloadsRequest, + dict, + ], +) +def test_list_workloads_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.ListWorkloadsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workloads(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkloadsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_workloads_rest_required_fields( + request_type=assuredworkloads.ListWorkloadsRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workloads._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workloads._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.ListWorkloadsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = assuredworkloads.ListWorkloadsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workloads(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workloads_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workloads._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workloads_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "post_list_workloads" + ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "pre_list_workloads" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.ListWorkloadsRequest.pb( + assuredworkloads.ListWorkloadsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = assuredworkloads.ListWorkloadsResponse.to_json( + assuredworkloads.ListWorkloadsResponse() + ) + + request = assuredworkloads.ListWorkloadsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = assuredworkloads.ListWorkloadsResponse() + + client.list_workloads( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workloads_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.ListWorkloadsRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workloads(request) + + +def test_list_workloads_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.ListWorkloadsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.ListWorkloadsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workloads(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/workloads" + % client.transport._host, + args[1], + ) + + +def test_list_workloads_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workloads( + assuredworkloads.ListWorkloadsRequest(), + parent="parent_value", + ) + + +def test_list_workloads_rest_pager(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + assuredworkloads.ListWorkloadsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_workloads(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assuredworkloads.Workload) for i in results) + + pages = list(client.list_workloads(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_violations_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = assuredworkloads.ListViolationsRequest() + with pytest.raises(RuntimeError): + client.list_violations(request) + + +def test_get_violation_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) 
+ request = assuredworkloads.GetViolationRequest() + with pytest.raises(RuntimeError): + client.get_violation(request) + + +def test_acknowledge_violation_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = assuredworkloads.AcknowledgeViolationRequest() + with pytest.raises(RuntimeError): + client.acknowledge_violation(request) + + +def test_list_violations_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.list_violations({}) + assert "Method ListViolations is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_get_violation_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.get_violation({}) + assert "Method GetViolation is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_acknowledge_violation_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.acknowledge_violation({}) + assert "Method AcknowledgeViolation is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + transports.AssuredWorkloadsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AssuredWorkloadsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AssuredWorkloadsServiceGrpcTransport, + ) + + +def test_assured_workloads_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AssuredWorkloadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_assured_workloads_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AssuredWorkloadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_workload", + "update_workload", + "restrict_allowed_resources", + "delete_workload", + "get_workload", + "list_workloads", + "list_violations", + "get_violation", + "acknowledge_violation", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_assured_workloads_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssuredWorkloadsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_assured_workloads_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssuredWorkloadsServiceTransport() + adc.assert_called_once() + + +def test_assured_workloads_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AssuredWorkloadsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + transports.AssuredWorkloadsServiceRestTransport, + ], +) +def test_assured_workloads_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AssuredWorkloadsServiceGrpcTransport, grpc_helpers), + (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_assured_workloads_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "assuredworkloads.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="assuredworkloads.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_assured_workloads_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AssuredWorkloadsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_assured_workloads_service_rest_lro_client(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_assured_workloads_service_host_no_port(transport_name): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="assuredworkloads.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "assuredworkloads.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://assuredworkloads.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_assured_workloads_service_host_with_port(transport_name): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="assuredworkloads.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "assuredworkloads.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://assuredworkloads.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_assured_workloads_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AssuredWorkloadsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AssuredWorkloadsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_workload._session + session2 = client2.transport.create_workload._session + assert session1 != session2 + session1 = client1.transport.update_workload._session + session2 = client2.transport.update_workload._session + assert session1 != session2 + session1 = 
client1.transport.restrict_allowed_resources._session + session2 = client2.transport.restrict_allowed_resources._session + assert session1 != session2 + session1 = client1.transport.delete_workload._session + session2 = client2.transport.delete_workload._session + assert session1 != session2 + session1 = client1.transport.get_workload._session + session2 = client2.transport.get_workload._session + assert session1 != session2 + session1 = client1.transport.list_workloads._session + session2 = client2.transport.list_workloads._session + assert session1 != session2 + session1 = client1.transport.list_violations._session + session2 = client2.transport.list_violations._session + assert session1 != session2 + session1 = client1.transport.get_violation._session + session2 = client2.transport.get_violation._session + assert session1 != session2 + session1 = client1.transport.acknowledge_violation._session + session2 = client2.transport.acknowledge_violation._session + assert session1 != session2 + + +def test_assured_workloads_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_assured_workloads_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments 
(api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_assured_workloads_service_grpc_lro_client(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_assured_workloads_service_grpc_lro_async_client(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_violation_path(): + organization = "squid" + location = "clam" + workload = "whelk" + violation = "octopus" + expected = "organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}".format( + organization=organization, + location=location, + workload=workload, + violation=violation, + ) + actual = AssuredWorkloadsServiceClient.violation_path( + organization, location, workload, violation + ) + assert expected == actual + + +def test_parse_violation_path(): + expected = { + "organization": "oyster", + "location": "nudibranch", + "workload": "cuttlefish", + "violation": "mussel", + } + path = AssuredWorkloadsServiceClient.violation_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssuredWorkloadsServiceClient.parse_violation_path(path) + assert expected == actual + + +def test_workload_path(): + organization = "winkle" + location = "nautilus" + workload = "scallop" + expected = ( + "organizations/{organization}/locations/{location}/workloads/{workload}".format( + organization=organization, + location=location, + workload=workload, + ) + ) + actual = AssuredWorkloadsServiceClient.workload_path( + organization, location, workload + ) + assert expected == actual + + +def test_parse_workload_path(): + expected = { + "organization": "abalone", + "location": "squid", + "workload": "clam", + } + path = AssuredWorkloadsServiceClient.workload_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_workload_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AssuredWorkloadsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = AssuredWorkloadsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AssuredWorkloadsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = AssuredWorkloadsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssuredWorkloadsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AssuredWorkloadsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = AssuredWorkloadsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = AssuredWorkloadsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = AssuredWorkloadsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AssuredWorkloadsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = AssuredWorkloadsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssuredWorkloadsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AssuredWorkloadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AssuredWorkloadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AssuredWorkloadsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "organizations/sample1/locations/sample2/operations/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py new file mode 100644 index 000000000000..92a397d1292e --- /dev/null +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py @@ -0,0 +1,4551 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import ( + AssuredWorkloadsServiceAsyncClient, + AssuredWorkloadsServiceClient, + pagers, + transports, +) +from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AssuredWorkloadsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssuredWorkloadsServiceClient, "grpc"), + (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), + (AssuredWorkloadsServiceClient, "rest"), + ], +) +def test_assured_workloads_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "assuredworkloads.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://assuredworkloads.googleapis.com" + ) + + 
+@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), + (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AssuredWorkloadsServiceRestTransport, "rest"), + ], +) +def test_assured_workloads_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssuredWorkloadsServiceClient, "grpc"), + (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), + (AssuredWorkloadsServiceClient, "rest"), + ], +) +def test_assured_workloads_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "assuredworkloads.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://assuredworkloads.googleapis.com" + ) + + +def test_assured_workloads_service_client_get_transport_class(): + transport = AssuredWorkloadsServiceClient.get_transport_class() + available_transports = [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceRestTransport, + ] + assert transport in available_transports + + transport = AssuredWorkloadsServiceClient.get_transport_class("grpc") + assert transport == transports.AssuredWorkloadsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + AssuredWorkloadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceClient), +) +@mock.patch.object( + AssuredWorkloadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceAsyncClient), +) +def test_assured_workloads_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AssuredWorkloadsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AssuredWorkloadsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + "true", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + 
"true", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + "false", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + "true", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + AssuredWorkloadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceClient), +) +@mock.patch.object( + AssuredWorkloadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_assured_workloads_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient] +) +@mock.patch.object( + AssuredWorkloadsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceClient), +) +@mock.patch.object( + AssuredWorkloadsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssuredWorkloadsServiceAsyncClient), +) +def test_assured_workloads_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + ), + ], +) +def test_assured_workloads_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_assured_workloads_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_assured_workloads_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AssuredWorkloadsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_assured_workloads_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "assuredworkloads.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="assuredworkloads.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.CreateWorkloadRequest, + dict, + ], +) +def test_create_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an 
empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.CreateWorkloadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + client.create_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.CreateWorkloadRequest() + + +@pytest.mark.asyncio +async def test_create_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.CreateWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.CreateWorkloadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workload_async_from_dict(): + await test_create_workload_async(request_type=dict) + + +def test_create_workload_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.CreateWorkloadRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workload_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.CreateWorkloadRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workload( + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + + +def test_create_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_workload( + assuredworkloads.CreateWorkloadRequest(), + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_workload( + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_workload( + assuredworkloads.CreateWorkloadRequest(), + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.UpdateWorkloadRequest, + dict, + ], +) +def test_update_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + ) + response = client.update_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.UpdateWorkloadRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + + +def test_update_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + client.update_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.UpdateWorkloadRequest() + + +@pytest.mark.asyncio +async def test_update_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.UpdateWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + ) + ) + response = await client.update_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.UpdateWorkloadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + + +@pytest.mark.asyncio +async def test_update_workload_async_from_dict(): + await test_update_workload_async(request_type=dict) + + +def test_update_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workload( + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workload( + assuredworkloads.UpdateWorkloadRequest(), + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_workload( + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].workload + mock_val = assuredworkloads.Workload(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_workload( + assuredworkloads.UpdateWorkloadRequest(), + workload=assuredworkloads.Workload(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.RestrictAllowedResourcesRequest, + dict, + ], +) +def test_restrict_allowed_resources(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + response = client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) + + +def test_restrict_allowed_resources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + client.restrict_allowed_resources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest() + + +@pytest.mark.asyncio +async def test_restrict_allowed_resources_async( + transport: str = "grpc_asyncio", + request_type=assuredworkloads.RestrictAllowedResourcesRequest, +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.RestrictAllowedResourcesResponse() + ) + response = await client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) + + +@pytest.mark.asyncio +async def test_restrict_allowed_resources_async_from_dict(): + await test_restrict_allowed_resources_async(request_type=dict) + + +def test_restrict_allowed_resources_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.RestrictAllowedResourcesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restrict_allowed_resources_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.RestrictAllowedResourcesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restrict_allowed_resources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.RestrictAllowedResourcesResponse() + ) + await client.restrict_allowed_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.DeleteWorkloadRequest, + dict, + ], +) +def test_delete_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.DeleteWorkloadRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + client.delete_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.DeleteWorkloadRequest() + + +@pytest.mark.asyncio +async def test_delete_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.DeleteWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.DeleteWorkloadRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_workload_async_from_dict(): + await test_delete_workload_async(request_type=dict) + + +def test_delete_workload_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = assuredworkloads.DeleteWorkloadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + call.return_value = None + client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workload_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = assuredworkloads.DeleteWorkloadRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workload( + assuredworkloads.DeleteWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_workload( + assuredworkloads.DeleteWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.GetWorkloadRequest, + dict, + ], +) +def test_get_workload(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + ) + response = client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetWorkloadRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + + +def test_get_workload_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + client.get_workload() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetWorkloadRequest() + + +@pytest.mark.asyncio +async def test_get_workload_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.GetWorkloadRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload( + name="name_value", + display_name="display_name_value", + compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, + billing_account="billing_account_value", + etag="etag_value", + provisioned_resources_parent="provisioned_resources_parent_value", + kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, + enable_sovereign_controls=True, + compliant_but_disallowed_services=[ + "compliant_but_disallowed_services_value" + ], + ) + ) + response = await client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.GetWorkloadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.Workload) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 + assert response.billing_account == "billing_account_value" + assert response.etag == "etag_value" + assert response.provisioned_resources_parent == "provisioned_resources_parent_value" + assert ( + response.kaj_enrollment_state + == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING + ) + assert response.enable_sovereign_controls is True + assert response.compliant_but_disallowed_services == [ + "compliant_but_disallowed_services_value" + ] + + +@pytest.mark.asyncio +async def test_get_workload_async_from_dict(): + await test_get_workload_async(request_type=dict) + + +def test_get_workload_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_workload_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workload( + assuredworkloads.GetWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workload_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.Workload() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.Workload() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_workload( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workload_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_workload( + assuredworkloads.GetWorkloadRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.AnalyzeWorkloadMoveRequest, + dict, + ], +) +def test_analyze_workload_move(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_workload_move), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse( + blockers=["blockers_value"], + ) + response = client.analyze_workload_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.AnalyzeWorkloadMoveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, assuredworkloads.AnalyzeWorkloadMoveResponse) + assert response.blockers == ["blockers_value"] + + +def test_analyze_workload_move_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_workload_move), "__call__" + ) as call: + client.analyze_workload_move() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.AnalyzeWorkloadMoveRequest() + + +@pytest.mark.asyncio +async def test_analyze_workload_move_async( + transport: str = "grpc_asyncio", + request_type=assuredworkloads.AnalyzeWorkloadMoveRequest, +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_workload_move), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.AnalyzeWorkloadMoveResponse( + blockers=["blockers_value"], + ) + ) + response = await client.analyze_workload_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.AnalyzeWorkloadMoveRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.AnalyzeWorkloadMoveResponse) + assert response.blockers == ["blockers_value"] + + +@pytest.mark.asyncio +async def test_analyze_workload_move_async_from_dict(): + await test_analyze_workload_move_async(request_type=dict) + + +def test_analyze_workload_move_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_workload_move), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_workload_move( + project="project_value", + target="target_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].target + mock_val = "target_value" + assert arg == mock_val + assert args[0].project == "project_value" + + +def test_analyze_workload_move_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_workload_move( + assuredworkloads.AnalyzeWorkloadMoveRequest(), + project="project_value", + target="target_value", + ) + + +@pytest.mark.asyncio +async def test_analyze_workload_move_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_workload_move), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.AnalyzeWorkloadMoveResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_workload_move( + project="project_value", + target="target_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].target + mock_val = "target_value" + assert arg == mock_val + assert args[0].project == "project_value" + + +@pytest.mark.asyncio +async def test_analyze_workload_move_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_workload_move( + assuredworkloads.AnalyzeWorkloadMoveRequest(), + project="project_value", + target="target_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.ListWorkloadsRequest, + dict, + ], +) +def test_list_workloads(request_type, transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = assuredworkloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_workloads(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListWorkloadsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkloadsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_workloads_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + client.list_workloads() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListWorkloadsRequest() + + +@pytest.mark.asyncio +async def test_list_workloads_async( + transport: str = "grpc_asyncio", request_type=assuredworkloads.ListWorkloadsRequest +): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_workloads(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == assuredworkloads.ListWorkloadsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkloadsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_workloads_async_from_dict(): + await test_list_workloads_async(request_type=dict) + + +def test_list_workloads_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListWorkloadsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workloads( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_workloads_flattened_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_workloads( + assuredworkloads.ListWorkloadsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_workloads_flattened_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = assuredworkloads.ListWorkloadsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + assuredworkloads.ListWorkloadsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_workloads( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_workloads_flattened_error_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_workloads( + assuredworkloads.ListWorkloadsRequest(), + parent="parent_value", + ) + + +def test_list_workloads_pager(transport_name: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_workloads(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assuredworkloads.Workload) for i in results) + + +def test_list_workloads_pages(transport_name: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + pages = list(client.list_workloads(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_workloads_async_pager(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workloads), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_workloads( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, assuredworkloads.Workload) for i in responses) + + +@pytest.mark.asyncio +async def test_list_workloads_async_pages(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workloads), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + next_page_token="abc", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[], + next_page_token="def", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + ], + next_page_token="ghi", + ), + assuredworkloads.ListWorkloadsResponse( + workloads=[ + assuredworkloads.Workload(), + assuredworkloads.Workload(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_workloads(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.CreateWorkloadRequest, + dict, + ], +) +def test_create_workload_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["workload"] = { + "name": "name_value", + "display_name": "display_name_value", + "resources": [{"resource_id": 1172, "resource_type": 1}], + "compliance_regime": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "billing_account": "billing_account_value", + "il4_settings": { + "kms_settings": { + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + } + }, + "cjis_settings": {"kms_settings": {}}, + "fedramp_high_settings": {"kms_settings": {}}, + "fedramp_moderate_settings": {"kms_settings": {}}, + "etag": "etag_value", + "labels": {}, + 
"provisioned_resources_parent": "provisioned_resources_parent_value", + "kms_settings": {}, + "resource_settings": [ + { + "resource_id": "resource_id_value", + "resource_type": 1, + "display_name": "display_name_value", + } + ], + "kaj_enrollment_state": 1, + "enable_sovereign_controls": True, + "saa_enrollment_response": {"setup_status": 1, "setup_errors": [1]}, + "compliant_but_disallowed_services": [ + "compliant_but_disallowed_services_value1", + "compliant_but_disallowed_services_value2", + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workload(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_workload_rest_required_fields( + request_type=assuredworkloads.CreateWorkloadRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workload._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("external_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_workload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_workload_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_workload._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("externalId",)) + & set( + ( + "parent", + "workload", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_workload_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload" + ) as post, mock.patch.object( + 
transports.AssuredWorkloadsServiceRestInterceptor, "pre_create_workload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.CreateWorkloadRequest.pb( + assuredworkloads.CreateWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = assuredworkloads.CreateWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_workload_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.CreateWorkloadRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["workload"] = { + "name": "name_value", + "display_name": "display_name_value", + "resources": [{"resource_id": 1172, "resource_type": 1}], + "compliance_regime": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "billing_account": "billing_account_value", + "il4_settings": { + "kms_settings": { + "next_rotation_time": {}, + "rotation_period": {"seconds": 751, "nanos": 543}, + } + }, + "cjis_settings": {"kms_settings": {}}, + "fedramp_high_settings": {"kms_settings": {}}, + "fedramp_moderate_settings": {"kms_settings": {}}, + "etag": "etag_value", + "labels": {}, + "provisioned_resources_parent": "provisioned_resources_parent_value", + "kms_settings": 
{}, + "resource_settings": [ + { + "resource_id": "resource_id_value", + "resource_type": 1, + "display_name": "display_name_value", + } + ], + "kaj_enrollment_state": 1, + "enable_sovereign_controls": True, + "saa_enrollment_response": {"setup_status": 1, "setup_errors": [1]}, + "compliant_but_disallowed_services": [ + "compliant_but_disallowed_services_value1", + "compliant_but_disallowed_services_value2", + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workload(request) + + +def test_create_workload_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=organizations/*/locations/*}/workloads" + % client.transport._host, + args[1], + ) + + +def test_create_workload_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_workload( + assuredworkloads.CreateWorkloadRequest(), + parent="parent_value", + workload=assuredworkloads.Workload(name="name_value"), + ) + + +def test_create_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_update_workload_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = assuredworkloads.UpdateWorkloadRequest() + with pytest.raises(RuntimeError): + client.update_workload(request) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.RestrictAllowedResourcesRequest, + dict, + ], +) +def test_restrict_allowed_resources_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.RestrictAllowedResourcesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restrict_allowed_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) + + +def test_restrict_allowed_resources_rest_required_fields( + request_type=assuredworkloads.RestrictAllowedResourcesRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restrict_allowed_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restrict_allowed_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = assuredworkloads.RestrictAllowedResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restrict_allowed_resources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restrict_allowed_resources_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restrict_allowed_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "restrictionType", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restrict_allowed_resources_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_restrict_allowed_resources", + 
) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "pre_restrict_allowed_resources", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = assuredworkloads.RestrictAllowedResourcesRequest.pb( + assuredworkloads.RestrictAllowedResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + assuredworkloads.RestrictAllowedResourcesResponse.to_json( + assuredworkloads.RestrictAllowedResourcesResponse() + ) + ) + + request = assuredworkloads.RestrictAllowedResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + + client.restrict_allowed_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restrict_allowed_resources_rest_bad_request( + transport: str = "rest", + request_type=assuredworkloads.RestrictAllowedResourcesRequest, +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restrict_allowed_resources(request) + + +def test_restrict_allowed_resources_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + assuredworkloads.DeleteWorkloadRequest, + dict, + ], +) +def test_delete_workload_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workload(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_workload_rest_required_fields( + request_type=assuredworkloads.DeleteWorkloadRequest, +): + transport_class = transports.AssuredWorkloadsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workload._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workload._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workload(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workload_rest_unset_required_fields(): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workload._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workload_rest_interceptors(null_interceptor): + transport = transports.AssuredWorkloadsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AssuredWorkloadsServiceRestInterceptor(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, "pre_delete_workload" + ) as pre: + pre.assert_not_called() + pb_message = assuredworkloads.DeleteWorkloadRequest.pb( + assuredworkloads.DeleteWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = assuredworkloads.DeleteWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_workload_rest_bad_request( + transport: str = "rest", request_type=assuredworkloads.DeleteWorkloadRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/workloads/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workload(request) + + +def test_delete_workload_rest_flattened(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/workloads/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workload(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=organizations/*/locations/*/workloads/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workload_rest_flattened_error(transport: str = "rest"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_workload( + assuredworkloads.DeleteWorkloadRequest(), + name="name_value", + ) + + +def test_delete_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_get_workload_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = assuredworkloads.GetWorkloadRequest() + with pytest.raises(RuntimeError): + client.get_workload(request) + + +def test_analyze_workload_move_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = assuredworkloads.AnalyzeWorkloadMoveRequest() + with pytest.raises(RuntimeError): + client.analyze_workload_move(request) + + +def test_list_workloads_rest_no_http_options(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = assuredworkloads.ListWorkloadsRequest() + with pytest.raises(RuntimeError): + client.list_workloads(request) + + +def test_update_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.update_workload({}) + assert "Method UpdateWorkload is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_get_workload_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. 
+ with pytest.raises(NotImplementedError) as not_implemented_error: + client.get_workload({}) + assert "Method GetWorkload is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_analyze_workload_move_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.analyze_workload_move({}) + assert "Method AnalyzeWorkloadMove is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_list_workloads_rest_error(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.list_workloads({}) + assert "Method ListWorkloads is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssuredWorkloadsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AssuredWorkloadsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AssuredWorkloadsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + transports.AssuredWorkloadsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AssuredWorkloadsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AssuredWorkloadsServiceGrpcTransport, + ) + + +def test_assured_workloads_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AssuredWorkloadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_assured_workloads_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AssuredWorkloadsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_workload", + "update_workload", + "restrict_allowed_resources", + "delete_workload", + "get_workload", + "analyze_workload_move", + "list_workloads", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_assured_workloads_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssuredWorkloadsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def 
test_assured_workloads_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssuredWorkloadsServiceTransport() + adc.assert_called_once() + + +def test_assured_workloads_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AssuredWorkloadsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + transports.AssuredWorkloadsServiceRestTransport, + ], +) +def test_assured_workloads_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AssuredWorkloadsServiceGrpcTransport, grpc_helpers), + (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_assured_workloads_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "assuredworkloads.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="assuredworkloads.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_assured_workloads_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AssuredWorkloadsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_assured_workloads_service_rest_lro_client(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_assured_workloads_service_host_no_port(transport_name): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="assuredworkloads.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "assuredworkloads.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://assuredworkloads.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_assured_workloads_service_host_with_port(transport_name): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="assuredworkloads.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "assuredworkloads.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://assuredworkloads.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_assured_workloads_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AssuredWorkloadsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AssuredWorkloadsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_workload._session + session2 = client2.transport.create_workload._session + assert session1 != session2 + session1 = client1.transport.update_workload._session + session2 = client2.transport.update_workload._session + assert session1 != session2 + session1 = 
client1.transport.restrict_allowed_resources._session + session2 = client2.transport.restrict_allowed_resources._session + assert session1 != session2 + session1 = client1.transport.delete_workload._session + session2 = client2.transport.delete_workload._session + assert session1 != session2 + session1 = client1.transport.get_workload._session + session2 = client2.transport.get_workload._session + assert session1 != session2 + session1 = client1.transport.analyze_workload_move._session + session2 = client2.transport.analyze_workload_move._session + assert session1 != session2 + session1 = client1.transport.list_workloads._session + session2 = client2.transport.list_workloads._session + assert session1 != session2 + + +def test_assured_workloads_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AssuredWorkloadsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_assured_workloads_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AssuredWorkloadsServiceGrpcTransport, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ], +) +def test_assured_workloads_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_assured_workloads_service_grpc_lro_client(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_assured_workloads_service_grpc_lro_async_client(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_workload_path(): + organization = "squid" + location = "clam" + workload = "whelk" + expected = ( + "organizations/{organization}/locations/{location}/workloads/{workload}".format( + organization=organization, + location=location, + workload=workload, + ) + ) + actual = AssuredWorkloadsServiceClient.workload_path( + organization, location, workload + ) + assert expected == actual + + +def test_parse_workload_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "workload": "nudibranch", + } + path = AssuredWorkloadsServiceClient.workload_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_workload_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AssuredWorkloadsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = AssuredWorkloadsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssuredWorkloadsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AssuredWorkloadsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = AssuredWorkloadsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AssuredWorkloadsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = AssuredWorkloadsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = AssuredWorkloadsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = AssuredWorkloadsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssuredWorkloadsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AssuredWorkloadsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = AssuredWorkloadsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AssuredWorkloadsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AssuredWorkloadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AssuredWorkloadsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AssuredWorkloadsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = 
AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "name": "organizations/sample1/locations/sample2/operations/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_get_operation(transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AssuredWorkloadsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AssuredWorkloadsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + AssuredWorkloadsServiceClient, + transports.AssuredWorkloadsServiceGrpcTransport, + ), + ( + AssuredWorkloadsServiceAsyncClient, + transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bare-metal-solution/.flake8 b/packages/google-cloud-bare-metal-solution/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-bare-metal-solution/.flake8 +++ b/packages/google-cloud-bare-metal-solution/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bare-metal-solution/CHANGELOG.md b/packages/google-cloud-bare-metal-solution/CHANGELOG.md index 63132e081195..b21644dbb895 100644 --- a/packages/google-cloud-bare-metal-solution/CHANGELOG.md +++ b/packages/google-cloud-bare-metal-solution/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [1.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bare-metal-solution-v1.4.2...google-cloud-bare-metal-solution-v1.5.0) (2023-08-31) + + +### Features + +* Add new Instance state values ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add NFSShare resource and methods ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add OsImage resource and methods ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add ProvisioningConfig resource and methods ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add RPC EvictLun ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add RPCs Enable/Disable InteractiveSerialConsole ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add RPCs Rename/Evict Volume ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add several new resources and RPCs ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add SSHKey resource and methods ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) +* Add VolumeSnapshot resource and methods ([72b4fc5](https://github.com/googleapis/google-cloud-python/commit/72b4fc5d5dced9491cae99a228dbc7604474dbe3)) + ## 
[1.4.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bare-metal-solution-v1.4.1...google-cloud-bare-metal-solution-v1.4.2) (2023-07-05) diff --git a/packages/google-cloud-bare-metal-solution/CONTRIBUTING.rst b/packages/google-cloud-bare-metal-solution/CONTRIBUTING.rst index d9f64097ab56..3b093feef227 100644 --- a/packages/google-cloud-bare-metal-solution/CONTRIBUTING.rst +++ b/packages/google-cloud-bare-metal-solution/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bare-metal-solution/noxfile.py ********** diff --git a/packages/google-cloud-bare-metal-solution/MANIFEST.in b/packages/google-cloud-bare-metal-solution/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-bare-metal-solution/MANIFEST.in +++ b/packages/google-cloud-bare-metal-solution/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bare-metal-solution/README.rst b/packages/google-cloud-bare-metal-solution/README.rst index 56b3d3527a1d..036ae2e6eb65 100644 --- a/packages/google-cloud-bare-metal-solution/README.rst +++ b/packages/google-cloud-bare-metal-solution/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. 
`venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-bare-metal-solution + pip install google-cloud-bare-metal-solution Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-bare-metal-solution + py -m venv + .\\Scripts\activate + pip install google-cloud-bare-metal-solution Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-bare-metal-solution/docs/conf.py b/packages/google-cloud-bare-metal-solution/docs/conf.py index c65be50e0c70..7b0e66393a5d 100644 --- a/packages/google-cloud-bare-metal-solution/docs/conf.py +++ b/packages/google-cloud-bare-metal-solution/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/__init__.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/__init__.py index 2a3f5dbd5d7e..ab2fff2254ac 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/__init__.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/__init__.py @@ -28,12 +28,21 @@ OperationMetadata, ResetInstanceResponse, ) +from google.cloud.bare_metal_solution_v2.types.common import ( + VolumePerformanceTier, + WorkloadProfile, +) from google.cloud.bare_metal_solution_v2.types.instance import ( DetachLunRequest, + DisableInteractiveSerialConsoleRequest, + DisableInteractiveSerialConsoleResponse, + EnableInteractiveSerialConsoleRequest, + EnableInteractiveSerialConsoleResponse, GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, + RenameInstanceRequest, ResetInstanceRequest, ServerNetworkTemplate, StartInstanceRequest, @@ -43,6 +52,7 @@ UpdateInstanceRequest, ) from google.cloud.bare_metal_solution_v2.types.lun import ( + EvictLunRequest, GetLunRequest, ListLunsRequest, ListLunsResponse, @@ -58,35 +68,85 @@ LogicalInterface, Network, NetworkAddressReservation, + NetworkMountPoint, NetworkUsage, + RenameNetworkRequest, UpdateNetworkRequest, ) from google.cloud.bare_metal_solution_v2.types.nfs_share import ( + CreateNfsShareRequest, + DeleteNfsShareRequest, GetNfsShareRequest, ListNfsSharesRequest, ListNfsSharesResponse, NfsShare, + RenameNfsShareRequest, UpdateNfsShareRequest, ) +from google.cloud.bare_metal_solution_v2.types.osimage import ( + ListOSImagesRequest, + ListOSImagesResponse, + OSImage, +) +from google.cloud.bare_metal_solution_v2.types.provisioning import ( + CreateProvisioningConfigRequest, + GetProvisioningConfigRequest, + InstanceConfig, + InstanceQuota, + ListProvisioningQuotasRequest, + ListProvisioningQuotasResponse, + NetworkConfig, + ProvisioningConfig, + 
ProvisioningQuota, + SubmitProvisioningConfigRequest, + SubmitProvisioningConfigResponse, + UpdateProvisioningConfigRequest, + VolumeConfig, +) +from google.cloud.bare_metal_solution_v2.types.ssh_key import ( + CreateSSHKeyRequest, + DeleteSSHKeyRequest, + ListSSHKeysRequest, + ListSSHKeysResponse, + SSHKey, +) from google.cloud.bare_metal_solution_v2.types.volume import ( + EvictVolumeRequest, GetVolumeRequest, ListVolumesRequest, ListVolumesResponse, + RenameVolumeRequest, ResizeVolumeRequest, UpdateVolumeRequest, Volume, ) +from google.cloud.bare_metal_solution_v2.types.volume_snapshot import ( + CreateVolumeSnapshotRequest, + DeleteVolumeSnapshotRequest, + GetVolumeSnapshotRequest, + ListVolumeSnapshotsRequest, + ListVolumeSnapshotsResponse, + RestoreVolumeSnapshotRequest, + VolumeSnapshot, +) __all__ = ( "BareMetalSolutionClient", "BareMetalSolutionAsyncClient", "OperationMetadata", "ResetInstanceResponse", + "VolumePerformanceTier", + "WorkloadProfile", "DetachLunRequest", + "DisableInteractiveSerialConsoleRequest", + "DisableInteractiveSerialConsoleResponse", + "EnableInteractiveSerialConsoleRequest", + "EnableInteractiveSerialConsoleResponse", "GetInstanceRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", + "RenameInstanceRequest", "ResetInstanceRequest", "ServerNetworkTemplate", "StartInstanceRequest", @@ -94,6 +154,7 @@ "StopInstanceRequest", "StopInstanceResponse", "UpdateInstanceRequest", + "EvictLunRequest", "GetLunRequest", "ListLunsRequest", "ListLunsResponse", @@ -106,18 +167,53 @@ "LogicalInterface", "Network", "NetworkAddressReservation", + "NetworkMountPoint", "NetworkUsage", + "RenameNetworkRequest", "UpdateNetworkRequest", "VRF", + "CreateNfsShareRequest", + "DeleteNfsShareRequest", "GetNfsShareRequest", "ListNfsSharesRequest", "ListNfsSharesResponse", "NfsShare", + "RenameNfsShareRequest", "UpdateNfsShareRequest", + "ListOSImagesRequest", + "ListOSImagesResponse", + "OSImage", + "CreateProvisioningConfigRequest", + 
"GetProvisioningConfigRequest", + "InstanceConfig", + "InstanceQuota", + "ListProvisioningQuotasRequest", + "ListProvisioningQuotasResponse", + "NetworkConfig", + "ProvisioningConfig", + "ProvisioningQuota", + "SubmitProvisioningConfigRequest", + "SubmitProvisioningConfigResponse", + "UpdateProvisioningConfigRequest", + "VolumeConfig", + "CreateSSHKeyRequest", + "DeleteSSHKeyRequest", + "ListSSHKeysRequest", + "ListSSHKeysResponse", + "SSHKey", + "EvictVolumeRequest", "GetVolumeRequest", "ListVolumesRequest", "ListVolumesResponse", + "RenameVolumeRequest", "ResizeVolumeRequest", "UpdateVolumeRequest", "Volume", + "CreateVolumeSnapshotRequest", + "DeleteVolumeSnapshotRequest", + "GetVolumeSnapshotRequest", + "ListVolumeSnapshotsRequest", + "ListVolumeSnapshotsResponse", + "RestoreVolumeSnapshotRequest", + "VolumeSnapshot", ) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py index 360a0d13ebdd..c024d69dafa9 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/__init__.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/__init__.py index a4743d20d9c7..222379fe7a32 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/__init__.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/__init__.py @@ -23,12 +23,18 @@ BareMetalSolutionClient, ) from .types.baremetalsolution import OperationMetadata, ResetInstanceResponse +from .types.common import VolumePerformanceTier, WorkloadProfile from .types.instance import ( DetachLunRequest, + DisableInteractiveSerialConsoleRequest, + DisableInteractiveSerialConsoleResponse, + EnableInteractiveSerialConsoleRequest, + EnableInteractiveSerialConsoleResponse, GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, + RenameInstanceRequest, ResetInstanceRequest, ServerNetworkTemplate, StartInstanceRequest, @@ -37,7 +43,13 @@ StopInstanceResponse, UpdateInstanceRequest, ) -from .types.lun import GetLunRequest, ListLunsRequest, ListLunsResponse, Lun +from .types.lun import ( + EvictLunRequest, + GetLunRequest, + ListLunsRequest, + ListLunsResponse, + Lun, +) from .types.network import ( VRF, GetNetworkRequest, @@ -48,35 +60,91 @@ LogicalInterface, Network, NetworkAddressReservation, + NetworkMountPoint, NetworkUsage, + RenameNetworkRequest, UpdateNetworkRequest, ) from .types.nfs_share import ( + CreateNfsShareRequest, + DeleteNfsShareRequest, GetNfsShareRequest, ListNfsSharesRequest, ListNfsSharesResponse, NfsShare, + RenameNfsShareRequest, UpdateNfsShareRequest, ) +from .types.osimage import ListOSImagesRequest, ListOSImagesResponse, OSImage +from .types.provisioning import ( + CreateProvisioningConfigRequest, + GetProvisioningConfigRequest, + InstanceConfig, + InstanceQuota, + 
ListProvisioningQuotasRequest, + ListProvisioningQuotasResponse, + NetworkConfig, + ProvisioningConfig, + ProvisioningQuota, + SubmitProvisioningConfigRequest, + SubmitProvisioningConfigResponse, + UpdateProvisioningConfigRequest, + VolumeConfig, +) +from .types.ssh_key import ( + CreateSSHKeyRequest, + DeleteSSHKeyRequest, + ListSSHKeysRequest, + ListSSHKeysResponse, + SSHKey, +) from .types.volume import ( + EvictVolumeRequest, GetVolumeRequest, ListVolumesRequest, ListVolumesResponse, + RenameVolumeRequest, ResizeVolumeRequest, UpdateVolumeRequest, Volume, ) +from .types.volume_snapshot import ( + CreateVolumeSnapshotRequest, + DeleteVolumeSnapshotRequest, + GetVolumeSnapshotRequest, + ListVolumeSnapshotsRequest, + ListVolumeSnapshotsResponse, + RestoreVolumeSnapshotRequest, + VolumeSnapshot, +) __all__ = ( "BareMetalSolutionAsyncClient", "BareMetalSolutionClient", + "CreateNfsShareRequest", + "CreateProvisioningConfigRequest", + "CreateSSHKeyRequest", + "CreateVolumeSnapshotRequest", + "DeleteNfsShareRequest", + "DeleteSSHKeyRequest", + "DeleteVolumeSnapshotRequest", "DetachLunRequest", + "DisableInteractiveSerialConsoleRequest", + "DisableInteractiveSerialConsoleResponse", + "EnableInteractiveSerialConsoleRequest", + "EnableInteractiveSerialConsoleResponse", + "EvictLunRequest", + "EvictVolumeRequest", "GetInstanceRequest", "GetLunRequest", "GetNetworkRequest", "GetNfsShareRequest", + "GetProvisioningConfigRequest", "GetVolumeRequest", + "GetVolumeSnapshotRequest", "Instance", + "InstanceConfig", + "InstanceQuota", "ListInstancesRequest", "ListInstancesResponse", "ListLunsRequest", @@ -87,27 +155,53 @@ "ListNetworksResponse", "ListNfsSharesRequest", "ListNfsSharesResponse", + "ListOSImagesRequest", + "ListOSImagesResponse", + "ListProvisioningQuotasRequest", + "ListProvisioningQuotasResponse", + "ListSSHKeysRequest", + "ListSSHKeysResponse", + "ListVolumeSnapshotsRequest", + "ListVolumeSnapshotsResponse", "ListVolumesRequest", "ListVolumesResponse", 
"LogicalInterface", "Lun", "Network", "NetworkAddressReservation", + "NetworkConfig", + "NetworkMountPoint", "NetworkUsage", "NfsShare", + "OSImage", "OperationMetadata", + "ProvisioningConfig", + "ProvisioningQuota", + "RenameInstanceRequest", + "RenameNetworkRequest", + "RenameNfsShareRequest", + "RenameVolumeRequest", "ResetInstanceRequest", "ResetInstanceResponse", "ResizeVolumeRequest", + "RestoreVolumeSnapshotRequest", + "SSHKey", "ServerNetworkTemplate", "StartInstanceRequest", "StartInstanceResponse", "StopInstanceRequest", "StopInstanceResponse", + "SubmitProvisioningConfigRequest", + "SubmitProvisioningConfigResponse", "UpdateInstanceRequest", "UpdateNetworkRequest", "UpdateNfsShareRequest", + "UpdateProvisioningConfigRequest", "UpdateVolumeRequest", "VRF", "Volume", + "VolumeConfig", + "VolumePerformanceTier", + "VolumeSnapshot", + "WorkloadProfile", ) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_metadata.json b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_metadata.json index d63d3aa71f7d..bbf404937b49 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_metadata.json +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_metadata.json @@ -10,11 +10,66 @@ "grpc": { "libraryClient": "BareMetalSolutionClient", "rpcs": { + "CreateNfsShare": { + "methods": [ + "create_nfs_share" + ] + }, + "CreateProvisioningConfig": { + "methods": [ + "create_provisioning_config" + ] + }, + "CreateSSHKey": { + "methods": [ + "create_ssh_key" + ] + }, + "CreateVolumeSnapshot": { + "methods": [ + "create_volume_snapshot" + ] + }, + "DeleteNfsShare": { + "methods": [ + "delete_nfs_share" + ] + }, + "DeleteSSHKey": { + "methods": [ + "delete_ssh_key" + ] + }, + "DeleteVolumeSnapshot": { + "methods": [ + "delete_volume_snapshot" + ] + }, "DetachLun": { "methods": [ "detach_lun" ] }, + 
"DisableInteractiveSerialConsole": { + "methods": [ + "disable_interactive_serial_console" + ] + }, + "EnableInteractiveSerialConsole": { + "methods": [ + "enable_interactive_serial_console" + ] + }, + "EvictLun": { + "methods": [ + "evict_lun" + ] + }, + "EvictVolume": { + "methods": [ + "evict_volume" + ] + }, "GetInstance": { "methods": [ "get_instance" @@ -35,11 +90,21 @@ "get_nfs_share" ] }, + "GetProvisioningConfig": { + "methods": [ + "get_provisioning_config" + ] + }, "GetVolume": { "methods": [ "get_volume" ] }, + "GetVolumeSnapshot": { + "methods": [ + "get_volume_snapshot" + ] + }, "ListInstances": { "methods": [ "list_instances" @@ -65,11 +130,51 @@ "list_nfs_shares" ] }, + "ListOSImages": { + "methods": [ + "list_os_images" + ] + }, + "ListProvisioningQuotas": { + "methods": [ + "list_provisioning_quotas" + ] + }, + "ListSSHKeys": { + "methods": [ + "list_ssh_keys" + ] + }, + "ListVolumeSnapshots": { + "methods": [ + "list_volume_snapshots" + ] + }, "ListVolumes": { "methods": [ "list_volumes" ] }, + "RenameInstance": { + "methods": [ + "rename_instance" + ] + }, + "RenameNetwork": { + "methods": [ + "rename_network" + ] + }, + "RenameNfsShare": { + "methods": [ + "rename_nfs_share" + ] + }, + "RenameVolume": { + "methods": [ + "rename_volume" + ] + }, "ResetInstance": { "methods": [ "reset_instance" @@ -80,6 +185,11 @@ "resize_volume" ] }, + "RestoreVolumeSnapshot": { + "methods": [ + "restore_volume_snapshot" + ] + }, "StartInstance": { "methods": [ "start_instance" @@ -90,6 +200,11 @@ "stop_instance" ] }, + "SubmitProvisioningConfig": { + "methods": [ + "submit_provisioning_config" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -105,6 +220,11 @@ "update_nfs_share" ] }, + "UpdateProvisioningConfig": { + "methods": [ + "update_provisioning_config" + ] + }, "UpdateVolume": { "methods": [ "update_volume" @@ -115,11 +235,66 @@ "grpc-async": { "libraryClient": "BareMetalSolutionAsyncClient", "rpcs": { + "CreateNfsShare": { + "methods": [ + 
"create_nfs_share" + ] + }, + "CreateProvisioningConfig": { + "methods": [ + "create_provisioning_config" + ] + }, + "CreateSSHKey": { + "methods": [ + "create_ssh_key" + ] + }, + "CreateVolumeSnapshot": { + "methods": [ + "create_volume_snapshot" + ] + }, + "DeleteNfsShare": { + "methods": [ + "delete_nfs_share" + ] + }, + "DeleteSSHKey": { + "methods": [ + "delete_ssh_key" + ] + }, + "DeleteVolumeSnapshot": { + "methods": [ + "delete_volume_snapshot" + ] + }, "DetachLun": { "methods": [ "detach_lun" ] }, + "DisableInteractiveSerialConsole": { + "methods": [ + "disable_interactive_serial_console" + ] + }, + "EnableInteractiveSerialConsole": { + "methods": [ + "enable_interactive_serial_console" + ] + }, + "EvictLun": { + "methods": [ + "evict_lun" + ] + }, + "EvictVolume": { + "methods": [ + "evict_volume" + ] + }, "GetInstance": { "methods": [ "get_instance" @@ -140,11 +315,21 @@ "get_nfs_share" ] }, + "GetProvisioningConfig": { + "methods": [ + "get_provisioning_config" + ] + }, "GetVolume": { "methods": [ "get_volume" ] }, + "GetVolumeSnapshot": { + "methods": [ + "get_volume_snapshot" + ] + }, "ListInstances": { "methods": [ "list_instances" @@ -170,11 +355,51 @@ "list_nfs_shares" ] }, + "ListOSImages": { + "methods": [ + "list_os_images" + ] + }, + "ListProvisioningQuotas": { + "methods": [ + "list_provisioning_quotas" + ] + }, + "ListSSHKeys": { + "methods": [ + "list_ssh_keys" + ] + }, + "ListVolumeSnapshots": { + "methods": [ + "list_volume_snapshots" + ] + }, "ListVolumes": { "methods": [ "list_volumes" ] }, + "RenameInstance": { + "methods": [ + "rename_instance" + ] + }, + "RenameNetwork": { + "methods": [ + "rename_network" + ] + }, + "RenameNfsShare": { + "methods": [ + "rename_nfs_share" + ] + }, + "RenameVolume": { + "methods": [ + "rename_volume" + ] + }, "ResetInstance": { "methods": [ "reset_instance" @@ -185,6 +410,11 @@ "resize_volume" ] }, + "RestoreVolumeSnapshot": { + "methods": [ + "restore_volume_snapshot" + ] + }, "StartInstance": { 
"methods": [ "start_instance" @@ -195,6 +425,11 @@ "stop_instance" ] }, + "SubmitProvisioningConfig": { + "methods": [ + "submit_provisioning_config" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -210,6 +445,11 @@ "update_nfs_share" ] }, + "UpdateProvisioningConfig": { + "methods": [ + "update_provisioning_config" + ] + }, "UpdateVolume": { "methods": [ "update_volume" @@ -220,11 +460,66 @@ "rest": { "libraryClient": "BareMetalSolutionClient", "rpcs": { + "CreateNfsShare": { + "methods": [ + "create_nfs_share" + ] + }, + "CreateProvisioningConfig": { + "methods": [ + "create_provisioning_config" + ] + }, + "CreateSSHKey": { + "methods": [ + "create_ssh_key" + ] + }, + "CreateVolumeSnapshot": { + "methods": [ + "create_volume_snapshot" + ] + }, + "DeleteNfsShare": { + "methods": [ + "delete_nfs_share" + ] + }, + "DeleteSSHKey": { + "methods": [ + "delete_ssh_key" + ] + }, + "DeleteVolumeSnapshot": { + "methods": [ + "delete_volume_snapshot" + ] + }, "DetachLun": { "methods": [ "detach_lun" ] }, + "DisableInteractiveSerialConsole": { + "methods": [ + "disable_interactive_serial_console" + ] + }, + "EnableInteractiveSerialConsole": { + "methods": [ + "enable_interactive_serial_console" + ] + }, + "EvictLun": { + "methods": [ + "evict_lun" + ] + }, + "EvictVolume": { + "methods": [ + "evict_volume" + ] + }, "GetInstance": { "methods": [ "get_instance" @@ -245,11 +540,21 @@ "get_nfs_share" ] }, + "GetProvisioningConfig": { + "methods": [ + "get_provisioning_config" + ] + }, "GetVolume": { "methods": [ "get_volume" ] }, + "GetVolumeSnapshot": { + "methods": [ + "get_volume_snapshot" + ] + }, "ListInstances": { "methods": [ "list_instances" @@ -275,11 +580,51 @@ "list_nfs_shares" ] }, + "ListOSImages": { + "methods": [ + "list_os_images" + ] + }, + "ListProvisioningQuotas": { + "methods": [ + "list_provisioning_quotas" + ] + }, + "ListSSHKeys": { + "methods": [ + "list_ssh_keys" + ] + }, + "ListVolumeSnapshots": { + "methods": [ + "list_volume_snapshots" 
+ ] + }, "ListVolumes": { "methods": [ "list_volumes" ] }, + "RenameInstance": { + "methods": [ + "rename_instance" + ] + }, + "RenameNetwork": { + "methods": [ + "rename_network" + ] + }, + "RenameNfsShare": { + "methods": [ + "rename_nfs_share" + ] + }, + "RenameVolume": { + "methods": [ + "rename_volume" + ] + }, "ResetInstance": { "methods": [ "reset_instance" @@ -290,6 +635,11 @@ "resize_volume" ] }, + "RestoreVolumeSnapshot": { + "methods": [ + "restore_volume_snapshot" + ] + }, "StartInstance": { "methods": [ "start_instance" @@ -300,6 +650,11 @@ "stop_instance" ] }, + "SubmitProvisioningConfig": { + "methods": [ + "submit_provisioning_config" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -315,6 +670,11 @@ "update_nfs_share" ] }, + "UpdateProvisioningConfig": { + "methods": [ + "update_provisioning_config" + ] + }, "UpdateVolume": { "methods": [ "update_volume" diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py index 360a0d13ebdd..c024d69dafa9 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py index fe59be88fd73..1bd0ca02aca6 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py @@ -48,20 +48,28 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import pagers from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import baremetalsolution +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) +from google.cloud.bare_metal_solution_v2.types import baremetalsolution, common from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from google.cloud.bare_metal_solution_v2.types import ssh_key +from 
google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot from .client import BareMetalSolutionClient from .transports.base import DEFAULT_CLIENT_INFO, BareMetalSolutionTransport @@ -87,20 +95,62 @@ class BareMetalSolutionAsyncClient: instance_path = staticmethod(BareMetalSolutionClient.instance_path) parse_instance_path = staticmethod(BareMetalSolutionClient.parse_instance_path) + instance_config_path = staticmethod(BareMetalSolutionClient.instance_config_path) + parse_instance_config_path = staticmethod( + BareMetalSolutionClient.parse_instance_config_path + ) + instance_quota_path = staticmethod(BareMetalSolutionClient.instance_quota_path) + parse_instance_quota_path = staticmethod( + BareMetalSolutionClient.parse_instance_quota_path + ) + interconnect_attachment_path = staticmethod( + BareMetalSolutionClient.interconnect_attachment_path + ) + parse_interconnect_attachment_path = staticmethod( + BareMetalSolutionClient.parse_interconnect_attachment_path + ) lun_path = staticmethod(BareMetalSolutionClient.lun_path) parse_lun_path = staticmethod(BareMetalSolutionClient.parse_lun_path) network_path = staticmethod(BareMetalSolutionClient.network_path) parse_network_path = staticmethod(BareMetalSolutionClient.parse_network_path) + network_config_path = staticmethod(BareMetalSolutionClient.network_config_path) + parse_network_config_path = staticmethod( + BareMetalSolutionClient.parse_network_config_path + ) nfs_share_path = staticmethod(BareMetalSolutionClient.nfs_share_path) parse_nfs_share_path = staticmethod(BareMetalSolutionClient.parse_nfs_share_path) + os_image_path = staticmethod(BareMetalSolutionClient.os_image_path) + parse_os_image_path = staticmethod(BareMetalSolutionClient.parse_os_image_path) + provisioning_config_path = staticmethod( + 
BareMetalSolutionClient.provisioning_config_path + ) + parse_provisioning_config_path = staticmethod( + BareMetalSolutionClient.parse_provisioning_config_path + ) + provisioning_quota_path = staticmethod( + BareMetalSolutionClient.provisioning_quota_path + ) + parse_provisioning_quota_path = staticmethod( + BareMetalSolutionClient.parse_provisioning_quota_path + ) server_network_template_path = staticmethod( BareMetalSolutionClient.server_network_template_path ) parse_server_network_template_path = staticmethod( BareMetalSolutionClient.parse_server_network_template_path ) + ssh_key_path = staticmethod(BareMetalSolutionClient.ssh_key_path) + parse_ssh_key_path = staticmethod(BareMetalSolutionClient.parse_ssh_key_path) volume_path = staticmethod(BareMetalSolutionClient.volume_path) parse_volume_path = staticmethod(BareMetalSolutionClient.parse_volume_path) + volume_config_path = staticmethod(BareMetalSolutionClient.volume_config_path) + parse_volume_config_path = staticmethod( + BareMetalSolutionClient.parse_volume_config_path + ) + volume_snapshot_path = staticmethod(BareMetalSolutionClient.volume_snapshot_path) + parse_volume_snapshot_path = staticmethod( + BareMetalSolutionClient.parse_volume_snapshot_path + ) common_billing_account_path = staticmethod( BareMetalSolutionClient.common_billing_account_path ) @@ -600,6 +650,118 @@ async def sample_update_instance(): # Done; return the response. return response + async def rename_instance( + self, + request: Optional[Union[instance.RenameInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + new_instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance.Instance: + r"""RenameInstance sets a new name for an instance. + Use with caution, previous names become immediately + invalidated. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_rename_instance(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameInstanceRequest( + name="name_value", + new_instance_id="new_instance_id_value", + ) + + # Make the request + response = await client.rename_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest, dict]]): + The request object. Message requesting rename of a + server. + name (:class:`str`): + Required. The ``name`` field is used to identify the + instance. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_instance_id (:class:`str`): + Required. The new ``id`` of the instance. + This corresponds to the ``new_instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Instance: + A server. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = instance.RenameInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_instance_id is not None: + request.new_instance_id = new_instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_instance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def reset_instance( self, request: Optional[Union[instance.ResetInstanceRequest, dict]] = None, @@ -944,17 +1106,19 @@ async def sample_stop_instance(): # Done; return the response. 
return response - async def detach_lun( + async def enable_interactive_serial_console( self, - request: Optional[Union[gcb_instance.DetachLunRequest, dict]] = None, + request: Optional[ + Union[instance.EnableInteractiveSerialConsoleRequest, dict] + ] = None, *, - instance: Optional[str] = None, - lun: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Detach LUN from Instance. + r"""Enable the interactive serial console feature on an + instance. .. code-block:: python @@ -967,18 +1131,17 @@ async def detach_lun( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_detach_lun(): + async def sample_enable_interactive_serial_console(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.DetachLunRequest( - instance="instance_value", - lun="lun_value", + request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( + name="name_value", ) # Make the request - operation = client.detach_lun(request=request) + operation = client.enable_interactive_serial_console(request=request) print("Waiting for operation to complete...") @@ -988,17 +1151,12 @@ async def sample_detach_lun(): print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DetachLunRequest, dict]]): - The request object. Message for detach specific LUN from - an Instance. - instance (:class:`str`): - Required. Name of the instance. - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lun (:class:`str`): - Required. Name of the Lun to detach. 
- This corresponds to the ``lun`` field + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest, dict]]): + The request object. Message for enabling the interactive + serial console on an instance. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1012,33 +1170,31 @@ async def sample_detach_lun(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Instance` - A server. + :class:`google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleResponse` + Message for response of EnableInteractiveSerialConsole. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, lun]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = gcb_instance.DetachLunRequest(request) + request = instance.EnableInteractiveSerialConsoleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: - request.instance = instance - if lun is not None: - request.lun = lun + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( - self._client._transport.detach_lun, + self._client._transport.enable_interactive_serial_console, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1046,7 +1202,7 @@ async def sample_detach_lun(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1061,23 +1217,26 @@ async def sample_detach_lun(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - gcb_instance.Instance, + instance.EnableInteractiveSerialConsoleResponse, metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. return response - async def list_volumes( + async def disable_interactive_serial_console( self, - request: Optional[Union[volume.ListVolumesRequest, dict]] = None, + request: Optional[ + Union[instance.DisableInteractiveSerialConsoleRequest, dict] + ] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListVolumesAsyncPager: - r"""List storage volumes in a given project and location. + ) -> operation_async.AsyncOperation: + r"""Disable the interactive serial console feature on an + instance. .. 
code-block:: python @@ -1090,31 +1249,32 @@ async def list_volumes( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_list_volumes(): + async def sample_disable_interactive_serial_console(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumesRequest( - parent="parent_value", + request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( + name="name_value", ) # Make the request - page_result = client.list_volumes(request=request) + operation = client.disable_interactive_serial_console(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListVolumesRequest, dict]]): - The request object. Message for requesting a list of - storage volumes. - parent (:class:`str`): - Required. Parent value for - ListVolumesRequest. - - This corresponds to the ``parent`` field + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest, dict]]): + The request object. Message for disabling the interactive + serial console on an instance. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1124,35 +1284,35 @@ async def sample_list_volumes(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesAsyncPager: - Response message containing the list - of storage volumes. 
- Iterating over this object will yield - results and resolve additional pages - automatically. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleResponse` + Message for response of DisableInteractiveSerialConsole. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = volume.ListVolumesRequest(request) + request = instance.DisableInteractiveSerialConsoleRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_volumes, + self._client._transport.disable_interactive_serial_console, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1160,7 +1320,7 @@ async def sample_list_volumes(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1171,28 +1331,28 @@ async def sample_list_volumes(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListVolumesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + instance.DisableInteractiveSerialConsoleResponse, + metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. return response - async def get_volume( + async def detach_lun( self, - request: Optional[Union[volume.GetVolumeRequest, dict]] = None, + request: Optional[Union[gcb_instance.DetachLunRequest, dict]] = None, *, - name: Optional[str] = None, + instance: Optional[str] = None, + lun: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> volume.Volume: - r"""Get details of a single storage volume. + ) -> operation_async.AsyncOperation: + r"""Detach LUN from Instance. .. code-block:: python @@ -1205,28 +1365,38 @@ async def get_volume( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_get_volume(): + async def sample_detach_lun(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeRequest( - name="name_value", + request = bare_metal_solution_v2.DetachLunRequest( + instance="instance_value", + lun="lun_value", ) # Make the request - response = await client.get_volume(request=request) + operation = client.detach_lun(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetVolumeRequest, dict]]): - The request object. Message for requesting storage volume - information. - name (:class:`str`): - Required. 
Name of the resource. - This corresponds to the ``name`` field + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DetachLunRequest, dict]]): + The request object. Message for detach specific LUN from + an Instance. + instance (:class:`str`): + Required. Name of the instance. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lun (:class:`str`): + Required. Name of the Lun to detach. + This corresponds to the ``lun`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1236,20 +1406,2395 @@ async def sample_get_volume(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.Volume: - A storage volume. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Instance` + A server. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([instance, lun]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_instance.DetachLunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if lun is not None: + request.lun = lun + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.detach_lun, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcb_instance.Instance, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_ssh_keys( + self, + request: Optional[Union[ssh_key.ListSSHKeysRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSSHKeysAsyncPager: + r"""Lists the public SSH keys registered for the + specified project. These SSH keys are used only for the + interactive serial console feature. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_ssh_keys(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListSSHKeysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ssh_keys(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest, dict]]): + The request object. Message for listing the public SSH + keys in a project. + parent (:class:`str`): + Required. The parent containing the + SSH keys. Currently, the only valid + value for the location is "global". + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysAsyncPager: + Message for response of ListSSHKeys. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = ssh_key.ListSSHKeysRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_ssh_keys, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSSHKeysAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_ssh_key( + self, + request: Optional[Union[gcb_ssh_key.CreateSSHKeyRequest, dict]] = None, + *, + parent: Optional[str] = None, + ssh_key: Optional[gcb_ssh_key.SSHKey] = None, + ssh_key_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcb_ssh_key.SSHKey: + r"""Register a public SSH key in the specified project + for use with the interactive serial console feature. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_create_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateSSHKeyRequest( + parent="parent_value", + ssh_key_id="ssh_key_id_value", + ) + + # Make the request + response = await client.create_ssh_key(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest, dict]]): + The request object. Message for registering a public SSH + key in a project. + parent (:class:`str`): + Required. The parent containing the + SSH keys. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssh_key (:class:`google.cloud.bare_metal_solution_v2.types.SSHKey`): + Required. The SSH key to register. + This corresponds to the ``ssh_key`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssh_key_id (:class:`str`): + Required. The ID to use for the key, which will become + the final component of the key's resource name. + + This value must match the regex: [a-zA-Z0-9@.-_]{1,64} + + This corresponds to the ``ssh_key_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.SSHKey: + An SSH key, used for authorizing with + the interactive serial console feature. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, ssh_key, ssh_key_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_ssh_key.CreateSSHKeyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if ssh_key is not None: + request.ssh_key = ssh_key + if ssh_key_id is not None: + request.ssh_key_id = ssh_key_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_ssh_key, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_ssh_key( + self, + request: Optional[Union[ssh_key.DeleteSSHKeyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a public SSH key registered in the specified + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_delete_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteSSHKeyRequest( + name="name_value", + ) + + # Make the request + await client.delete_ssh_key(request=request) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest, dict]]): + The request object. Message for deleting an SSH key from + a project. + name (:class:`str`): + Required. The name of the SSH key to + delete. Currently, the only valid value + for the location is "global". + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = ssh_key.DeleteSSHKeyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_ssh_key, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_volumes( + self, + request: Optional[Union[volume.ListVolumesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVolumesAsyncPager: + r"""List storage volumes in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_volumes(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListVolumesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_volumes(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListVolumesRequest, dict]]): + The request object. Message for requesting a list of + storage volumes. + parent (:class:`str`): + Required. Parent value for + ListVolumesRequest. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesAsyncPager: + Response message containing the list + of storage volumes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = volume.ListVolumesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_volumes, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListVolumesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_volume( + self, + request: Optional[Union[volume.GetVolumeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.Volume: + r"""Get details of a single storage volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_get_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetVolumeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_volume(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetVolumeRequest, dict]]): + The request object. Message for requesting storage volume + information. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bare_metal_solution_v2.types.Volume: + A storage volume. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = volume.GetVolumeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_volume, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_volume( + self, + request: Optional[Union[gcb_volume.UpdateVolumeRequest, dict]] = None, + *, + volume: Optional[gcb_volume.Volume] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update details of a single storage volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_update_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.UpdateVolumeRequest( + ) + + # Make the request + operation = client.update_volume(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest, dict]]): + The request object. Message for updating a volume. + volume (:class:`google.cloud.bare_metal_solution_v2.types.Volume`): + Required. The volume to update. + + The ``name`` field is used to identify the volume to + update. Format: + projects/{project}/locations/{location}/volumes/{volume} + + This corresponds to the ``volume`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + The only currently supported fields are: + + 'labels' + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Volume` + A storage volume. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([volume, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_volume.UpdateVolumeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if volume is not None: + request.volume = volume + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_volume, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("volume.name", request.volume.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcb_volume.Volume, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + async def rename_volume( + self, + request: Optional[Union[volume.RenameVolumeRequest, dict]] = None, + *, + name: Optional[str] = None, + new_volume_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.Volume: + r"""RenameVolume sets a new name for a volume. 
+ Use with caution, previous names become immediately + invalidated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_rename_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameVolumeRequest( + name="name_value", + new_volume_id="new_volume_id_value", + ) + + # Make the request + response = await client.rename_volume(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest, dict]]): + The request object. Message requesting rename of a + server. + name (:class:`str`): + Required. The ``name`` field is used to identify the + volume. Format: + projects/{project}/locations/{location}/volumes/{volume} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_volume_id (:class:`str`): + Required. The new ``id`` of the volume. + This corresponds to the ``new_volume_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Volume: + A storage volume. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_volume_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = volume.RenameVolumeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_volume_id is not None: + request.new_volume_id = new_volume_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_volume, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def evict_volume( + self, + request: Optional[Union[volume.EvictVolumeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Skips volume's cooloff and deletes it now. + Volume must be in cooloff state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_evict_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictVolumeRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_volume(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest, dict]]): + The request object. Request for skip volume cooloff and + delete it. + name (:class:`str`): + Required. The name of the Volume. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = volume.EvictVolumeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.evict_volume, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + async def resize_volume( + self, + request: Optional[Union[gcb_volume.ResizeVolumeRequest, dict]] = None, + *, + volume: Optional[str] = None, + size_gib: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Emergency Volume resize. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_resize_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ResizeVolumeRequest( + volume="volume_value", + ) + + # Make the request + operation = client.resize_volume(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest, dict]]): + The request object. Request for emergency resize Volume. + volume (:class:`str`): + Required. Volume to resize. + This corresponds to the ``volume`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + size_gib (:class:`int`): + New Volume size, in GiB. + This corresponds to the ``size_gib`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Volume` + A storage volume. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([volume, size_gib]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_volume.ResizeVolumeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if volume is not None: + request.volume = volume + if size_gib is not None: + request.size_gib = size_gib + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resize_volume, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("volume", request.volume),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcb_volume.Volume, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_networks( + self, + request: Optional[Union[network.ListNetworksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworksAsyncPager: + r"""List network in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_networks(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListNetworksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_networks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNetworksRequest, dict]]): + The request object. Message for requesting a list of + networks. + parent (:class:`str`): + Required. Parent value for + ListNetworksRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksAsyncPager: + Response message containing the list + of networks. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = network.ListNetworksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_networks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNetworksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_network_usage( + self, + request: Optional[Union[network.ListNetworkUsageRequest, dict]] = None, + *, + location: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.ListNetworkUsageResponse: + r"""List all Networks (and used IPs for each Network) in + the vendor account associated with the specified + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_network_usage(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListNetworkUsageRequest( + location="location_value", + ) + + # Make the request + response = await client.list_network_usage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest, dict]]): + The request object. Request to get networks with IPs. + location (:class:`str`): + Required. Parent value (project and + location). + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse: + Response with Networks with IPs + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = network.ListNetworkUsageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if location is not None: + request.location = location + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_network_usage, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_network( + self, + request: Optional[Union[network.GetNetworkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.Network: + r"""Get details of a single network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_get_network(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetNetworkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_network(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetNetworkRequest, dict]]): + The request object. 
Message for requesting network + information. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Network: + A Network. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = network.GetNetworkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_network, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_network( + self, + request: Optional[Union[gcb_network.UpdateNetworkRequest, dict]] = None, + *, + network: Optional[gcb_network.Network] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update details of a single network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_update_network(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.UpdateNetworkRequest( + ) + + # Make the request + operation = client.update_network(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest, dict]]): + The request object. Message requesting to updating a + network. + network (:class:`google.cloud.bare_metal_solution_v2.types.Network`): + Required. The network to update. + + The ``name`` field is used to identify the instance to + update. Format: + projects/{project}/locations/{location}/networks/{network} + + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. The only currently + supported fields are: ``labels``, ``reservations``, + ``vrf.vlan_attachments`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Network` + A Network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([network, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_network.UpdateNetworkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if network is not None: + request.network = network + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_network, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("network.name", request.network.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcb_network.Network, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_volume_snapshot( + self, + request: Optional[ + Union[gcb_volume_snapshot.CreateVolumeSnapshotRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + volume_snapshot: Optional[gcb_volume_snapshot.VolumeSnapshot] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcb_volume_snapshot.VolumeSnapshot: + r"""Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if + called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_create_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_volume_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest, dict]]): + The request object. Message for creating a volume + snapshot. 
+ parent (:class:`str`): + Required. The volume to snapshot. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + volume_snapshot (:class:`google.cloud.bare_metal_solution_v2.types.VolumeSnapshot`): + Required. The snapshot to create. + This corresponds to the ``volume_snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: + A snapshot of a volume. Only boot + volumes can have snapshots. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, volume_snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_volume_snapshot.CreateVolumeSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if volume_snapshot is not None: + request.volume_snapshot = volume_snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_volume_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def restore_volume_snapshot( + self, + request: Optional[ + Union[gcb_volume_snapshot.RestoreVolumeSnapshotRequest, dict] + ] = None, + *, + volume_snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Uses the specified snapshot to restore its parent volume. + Returns INVALID_ARGUMENT if called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_restore_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( + volume_snapshot="volume_snapshot_value", + ) + + # Make the request + operation = client.restore_volume_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest, dict]]): + The request object. Message for restoring a volume + snapshot. + volume_snapshot (:class:`str`): + Required. 
Name of the snapshot which + will be used to restore its parent + volume. + + This corresponds to the ``volume_snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.VolumeSnapshot` + A snapshot of a volume. Only boot volumes can have + snapshots. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([volume_snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if volume_snapshot is not None: + request.volume_snapshot = volume_snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_volume_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("volume_snapshot", request.volume_snapshot),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcb_volume_snapshot.VolumeSnapshot, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_volume_snapshot( + self, + request: Optional[ + Union[volume_snapshot.DeleteVolumeSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a volume snapshot. Returns INVALID_ARGUMENT if called + for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_delete_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + await client.delete_volume_snapshot(request=request) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest, dict]]): + The request object. Message for deleting named Volume + snapshot. + name (:class:`str`): + Required. The name of the snapshot to + delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = volume_snapshot.DeleteVolumeSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_volume_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_volume_snapshot( + self, + request: Optional[Union[volume_snapshot.GetVolumeSnapshotRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume_snapshot.VolumeSnapshot: + r"""Returns the specified snapshot resource. Returns + INVALID_ARGUMENT if called for a non-boot volume. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_get_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_volume_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest, dict]]): + The request object. Message for requesting volume + snapshot information. + name (:class:`str`): + Required. The name of the snapshot. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: + A snapshot of a volume. Only boot + volumes can have snapshots. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = volume_snapshot.GetVolumeSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_volume_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_volume_snapshots( + self, + request: Optional[ + Union[volume_snapshot.ListVolumeSnapshotsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVolumeSnapshotsAsyncPager: + r"""Retrieves the list of snapshots for the specified + volume. Returns a response with an empty list of + snapshots if called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_volume_snapshots(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListVolumeSnapshotsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_volume_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest, dict]]): + The request object. Message for requesting a list of + volume snapshots. + parent (:class:`str`): + Required. Parent value for + ListVolumesRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsAsyncPager: + Response message containing the list + of volume snapshots. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = volume_snapshot.ListVolumeSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_volume_snapshots, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVolumeSnapshotsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_lun( + self, + request: Optional[Union[lun.GetLunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> lun.Lun: + r"""Get details of a single storage logical unit + number(LUN). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_get_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetLunRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lun(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetLunRequest, dict]]): + The request object. Message for requesting storage lun + information. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Lun: + A storage volume logical unit number + (LUN). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = lun.GetLunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_lun, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_luns( + self, + request: Optional[Union[lun.ListLunsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLunsAsyncPager: + r"""List storage volume luns for given storage volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_luns(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListLunsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_luns(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListLunsRequest, dict]]): + The request object. Message for requesting a list of + storage volume luns. + parent (:class:`str`): + Required. 
Parent value for + ListLunsRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsAsyncPager: + Response message containing the list + of storage volume luns. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = lun.ListLunsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_luns, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListLunsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def evict_lun( + self, + request: Optional[Union[lun.EvictLunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Skips lun's cooloff and deletes it now. + Lun must be in cooloff state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_evict_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictLunRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_lun(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.EvictLunRequest, dict]]): + The request object. Request for skip lun cooloff and + delete it. + name (:class:`str`): + Required. The name of the lun. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = volume.GetVolumeRequest(request) + request = lun.EvictLunRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1259,7 +3804,7 @@ async def sample_get_volume(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_volume, + self._client._transport.evict_lun, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1278,20 +3823,241 @@ async def sample_get_volume(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=baremetalsolution.OperationMetadata, + ) + # Done; return the response. 
return response - async def update_volume( + async def get_nfs_share( self, - request: Optional[Union[gcb_volume.UpdateVolumeRequest, dict]] = None, + request: Optional[Union[nfs_share.GetNfsShareRequest, dict]] = None, *, - volume: Optional[gcb_volume.Volume] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> nfs_share.NfsShare: + r"""Get details of a single NFS share. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_get_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetNfsShareRequest( + name="name_value", + ) + + # Make the request + response = await client.get_nfs_share(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest, dict]]): + The request object. Message for requesting NFS share + information. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bare_metal_solution_v2.types.NfsShare: + An NFS share. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = nfs_share.GetNfsShareRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_nfs_share, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_nfs_shares( + self, + request: Optional[Union[nfs_share.ListNfsSharesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNfsSharesAsyncPager: + r"""List NFS shares. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + async def sample_list_nfs_shares(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListNfsSharesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_nfs_shares(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest, dict]]): + The request object. Message for requesting a list of NFS + shares. + parent (:class:`str`): + Required. Parent value for + ListNfsSharesRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesAsyncPager: + Response message containing the list + of NFS shares. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = nfs_share.ListNfsSharesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_nfs_shares, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNfsSharesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_nfs_share( + self, + request: Optional[Union[gcb_nfs_share.UpdateNfsShareRequest, dict]] = None, + *, + nfs_share: Optional[gcb_nfs_share.NfsShare] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Update details of a single storage volume. + r"""Update details of a single NFS share. .. 
code-block:: python @@ -1304,16 +4070,16 @@ async def update_volume( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_update_volume(): + async def sample_update_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateVolumeRequest( + request = bare_metal_solution_v2.UpdateNfsShareRequest( ) # Make the request - operation = client.update_volume(request=request) + operation = client.update_nfs_share(request=request) print("Waiting for operation to complete...") @@ -1322,25 +4088,23 @@ async def sample_update_volume(): # Handle the response print(response) - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest, dict]]): - The request object. Message for updating a volume. - volume (:class:`google.cloud.bare_metal_solution_v2.types.Volume`): - Required. The volume to update. - - The ``name`` field is used to identify the volume to + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest, dict]]): + The request object. Message requesting to updating an NFS + share. + nfs_share (:class:`google.cloud.bare_metal_solution_v2.types.NfsShare`): + Required. The NFS share to update. + + The ``name`` field is used to identify the NFS share to update. Format: - projects/{project}/locations/{location}/volumes/{volume} + projects/{project}/locations/{location}/nfsShares/{nfs_share} - This corresponds to the ``volume`` field + This corresponds to the ``nfs_share`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): The list of fields to update. 
The only currently - supported fields are: ``snapshot_auto_delete_behavior`` - ``snapshot_schedule_policy_name`` 'labels' - 'snapshot_enabled' - 'snapshot_reservation_detail.reserved_space_percent' + supported fields are: ``labels`` ``allowed_clients`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1356,33 +4120,33 @@ async def sample_update_volume(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. + :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, update_mask]) + has_flattened_params = any([nfs_share, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = gcb_volume.UpdateVolumeRequest(request) + request = gcb_nfs_share.UpdateNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if volume is not None: - request.volume = volume + if nfs_share is not None: + request.nfs_share = nfs_share if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_volume, + self._client._transport.update_nfs_share, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1391,7 +4155,7 @@ async def sample_update_volume(): # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("volume.name", request.volume.name),) + (("nfs_share.name", request.nfs_share.name),) ), ) @@ -1407,24 +4171,24 @@ async def sample_update_volume(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - gcb_volume.Volume, + gcb_nfs_share.NfsShare, metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. return response - async def resize_volume( + async def create_nfs_share( self, - request: Optional[Union[gcb_volume.ResizeVolumeRequest, dict]] = None, + request: Optional[Union[gcb_nfs_share.CreateNfsShareRequest, dict]] = None, *, - volume: Optional[str] = None, - size_gib: Optional[int] = None, + parent: Optional[str] = None, + nfs_share: Optional[gcb_nfs_share.NfsShare] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Emergency Volume resize. + r"""Create an NFS share. .. code-block:: python @@ -1437,17 +4201,17 @@ async def resize_volume( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_resize_volume(): + async def sample_create_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ResizeVolumeRequest( - volume="volume_value", + request = bare_metal_solution_v2.CreateNfsShareRequest( + parent="parent_value", ) # Make the request - operation = client.resize_volume(request=request) + operation = client.create_nfs_share(request=request) print("Waiting for operation to complete...") @@ -1457,16 +4221,18 @@ async def sample_resize_volume(): print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest, dict]]): - The request object. 
Request for emergency resize Volume. - volume (:class:`str`): - Required. Volume to resize. - This corresponds to the ``volume`` field + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest, dict]]): + The request object. Message for creating an NFS share. + parent (:class:`str`): + Required. The parent project and + location. + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - size_gib (:class:`int`): - New Volume size, in GiB. - This corresponds to the ``size_gib`` field + nfs_share (:class:`google.cloud.bare_metal_solution_v2.types.NfsShare`): + Required. The NfsShare to create. + This corresponds to the ``nfs_share`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1480,33 +4246,33 @@ async def sample_resize_volume(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. + :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, size_gib]) + has_flattened_params = any([parent, nfs_share]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = gcb_volume.ResizeVolumeRequest(request) + request = gcb_nfs_share.CreateNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if volume is not None: - request.volume = volume - if size_gib is not None: - request.size_gib = size_gib + if parent is not None: + request.parent = parent + if nfs_share is not None: + request.nfs_share = nfs_share # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.resize_volume, + self._client._transport.create_nfs_share, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1514,7 +4280,7 @@ async def sample_resize_volume(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("volume", request.volume),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -1529,23 +4295,26 @@ async def sample_resize_volume(): response = operation_async.from_gapic( response, self._client._transport.operations_client, - gcb_volume.Volume, + gcb_nfs_share.NfsShare, metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. return response - async def list_networks( + async def rename_nfs_share( self, - request: Optional[Union[network.ListNetworksRequest, dict]] = None, + request: Optional[Union[nfs_share.RenameNfsShareRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, + new_nfsshare_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNetworksAsyncPager: - r"""List network in a given project and location. + ) -> nfs_share.NfsShare: + r"""RenameNfsShare sets a new name for an nfsshare. + Use with caution, previous names become immediately + invalidated. .. 
code-block:: python @@ -1558,31 +4327,37 @@ async def list_networks( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_list_networks(): + async def sample_rename_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworksRequest( - parent="parent_value", + request = bare_metal_solution_v2.RenameNfsShareRequest( + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", ) # Make the request - page_result = client.list_networks(request=request) + response = await client.rename_nfs_share(request=request) # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNetworksRequest, dict]]): - The request object. Message for requesting a list of - networks. - parent (:class:`str`): - Required. Parent value for - ListNetworksRequest. + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest, dict]]): + The request object. Message requesting rename of a + server. + name (:class:`str`): + Required. The ``name`` field is used to identify the + nfsshare. Format: + projects/{project}/locations/{location}/nfsshares/{nfsshare} - This corresponds to the ``parent`` field + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_nfsshare_id (:class:`str`): + Required. The new ``id`` of the nfsshare. + This corresponds to the ``new_nfsshare_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1592,35 +4367,32 @@ async def sample_list_networks(): sent along with the request as metadata. 
Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksAsyncPager: - Response message containing the list - of networks. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.NfsShare: + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name, new_nfsshare_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = network.ListNetworksRequest(request) + request = nfs_share.RenameNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name + if new_nfsshare_id is not None: + request.new_nfsshare_id = new_nfsshare_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_networks, + self._client._transport.rename_nfs_share, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1628,7 +4400,7 @@ async def sample_list_networks(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1639,30 +4411,20 @@ async def sample_list_networks(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListNetworksAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - async def list_network_usage( + async def delete_nfs_share( self, - request: Optional[Union[network.ListNetworkUsageRequest, dict]] = None, + request: Optional[Union[nfs_share.DeleteNfsShareRequest, dict]] = None, *, - location: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> network.ListNetworkUsageResponse: - r"""List all Networks (and used IPs for each Network) in - the vendor account associated with the specified - project. + ) -> operation_async.AsyncOperation: + r"""Delete an NFS share. The underlying volume is + automatically deleted. .. code-block:: python @@ -1675,29 +4437,33 @@ async def list_network_usage( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_list_network_usage(): + async def sample_delete_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworkUsageRequest( - location="location_value", + request = bare_metal_solution_v2.DeleteNfsShareRequest( + name="name_value", ) # Make the request - response = await client.list_network_usage(request=request) + operation = client.delete_nfs_share(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest, dict]]): - The request object. Request to get networks with IPs. - location (:class:`str`): - Required. Parent value (project and - location). 
+ request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest, dict]]): + The request object. Message for deleting an NFS share. + name (:class:`str`): + Required. The name of the NFS share + to delete. - This corresponds to the ``location`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1707,30 +4473,42 @@ async def sample_list_network_usage(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse: - Response with Networks with IPs + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([location]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = network.ListNetworkUsageRequest(request) + request = nfs_share.DeleteNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if location is not None: - request.location = location + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_network_usage, + self._client._transport.delete_nfs_share, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1738,7 +4516,7 @@ async def sample_list_network_usage(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1749,19 +4527,30 @@ async def sample_list_network_usage(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=baremetalsolution.OperationMetadata, + ) + # Done; return the response. return response - async def get_network( + async def list_provisioning_quotas( self, - request: Optional[Union[network.GetNetworkRequest, dict]] = None, + request: Optional[ + Union[provisioning.ListProvisioningQuotasRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> network.Network: - r"""Get details of a single network. + ) -> pagers.ListProvisioningQuotasAsyncPager: + r"""List the budget details to provision resources on a + given project. .. 
code-block:: python @@ -1774,28 +4563,31 @@ async def get_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_get_network(): + async def sample_list_provisioning_quotas(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetNetworkRequest( - name="name_value", + request = bare_metal_solution_v2.ListProvisioningQuotasRequest( + parent="parent_value", ) # Make the request - response = await client.get_network(request=request) + page_result = client.list_provisioning_quotas(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetNetworkRequest, dict]]): - The request object. Message for requesting network - information. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest, dict]]): + The request object. Message for requesting the list of + provisioning quotas. + parent (:class:`str`): + Required. Parent value for + ListProvisioningQuotasRequest. + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1805,30 +4597,35 @@ async def sample_get_network(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.Network: - A Network. + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasAsyncPager: + Response message for the list of + provisioning quotas. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = network.GetNetworkRequest(request) + request = provisioning.ListProvisioningQuotasRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_network, + self._client._transport.list_provisioning_quotas, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1836,7 +4633,7 @@ async def sample_get_network(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -1847,20 +4644,32 @@ async def sample_get_network(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListProvisioningQuotasAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + # Done; return the response. 
return response - async def update_network( + async def submit_provisioning_config( self, - request: Optional[Union[gcb_network.UpdateNetworkRequest, dict]] = None, + request: Optional[ + Union[provisioning.SubmitProvisioningConfigRequest, dict] + ] = None, *, - network: Optional[gcb_network.Network] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, + provisioning_config: Optional[provisioning.ProvisioningConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Update details of a single network. + ) -> provisioning.SubmitProvisioningConfigResponse: + r"""Submit a provisiong configuration for a given + project. .. code-block:: python @@ -1873,43 +4682,37 @@ async def update_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_update_network(): + async def sample_submit_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNetworkRequest( + request = bare_metal_solution_v2.SubmitProvisioningConfigRequest( + parent="parent_value", ) - # Make the request - operation = client.update_network(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest, dict]]): - The request object. Message requesting to updating a - network. - network (:class:`google.cloud.bare_metal_solution_v2.types.Network`): - Required. The network to update. 
+ # Make the request + response = await client.submit_provisioning_config(request=request) - The ``name`` field is used to identify the instance to - update. Format: - projects/{project}/locations/{location}/networks/{network} + # Handle the response + print(response) - This corresponds to the ``network`` field + Args: + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest, dict]]): + The request object. Request for SubmitProvisioningConfig. + parent (:class:`str`): + Required. The parent project and + location containing the + ProvisioningConfig. + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. The only currently - supported fields are: ``labels``, ``reservations`` + provisioning_config (:class:`google.cloud.bare_metal_solution_v2.types.ProvisioningConfig`): + Required. The ProvisioningConfig to + create. - This corresponds to the ``update_mask`` field + This corresponds to the ``provisioning_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1919,37 +4722,34 @@ async def sample_update_network(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Network` - A Network. + google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse: + Response for + SubmitProvisioningConfig. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([network, update_mask]) + has_flattened_params = any([parent, provisioning_config]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = gcb_network.UpdateNetworkRequest(request) + request = provisioning.SubmitProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if network is not None: - request.network = network - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent + if provisioning_config is not None: + request.provisioning_config = provisioning_config # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_network, + self._client._transport.submit_provisioning_config, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1957,9 +4757,7 @@ async def sample_update_network(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("network.name", request.network.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -1970,28 +4768,21 @@ async def sample_update_network(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_network.Network, - metadata_type=baremetalsolution.OperationMetadata, - ) - # Done; return the response. 
return response - async def get_lun( + async def get_provisioning_config( self, - request: Optional[Union[lun.GetLunRequest, dict]] = None, + request: Optional[ + Union[provisioning.GetProvisioningConfigRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> lun.Lun: - r"""Get details of a single storage logical unit - number(LUN). + ) -> provisioning.ProvisioningConfig: + r"""Get ProvisioningConfig by name. .. code-block:: python @@ -2004,27 +4795,28 @@ async def get_lun( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_get_lun(): + async def sample_get_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetLunRequest( + request = bare_metal_solution_v2.GetProvisioningConfigRequest( name="name_value", ) # Make the request - response = await client.get_lun(request=request) + response = await client.get_provisioning_config(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetLunRequest, dict]]): - The request object. Message for requesting storage lun - information. + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest, dict]]): + The request object. Request for GetProvisioningConfig. name (:class:`str`): - Required. Name of the resource. + Required. Name of the + ProvisioningConfig. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2035,10 +4827,8 @@ async def sample_get_lun(): sent along with the request as metadata. 
Returns: - google.cloud.bare_metal_solution_v2.types.Lun: - A storage volume logical unit number - (LUN). - + google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: + A provisioning configuration. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2050,7 +4840,7 @@ async def sample_get_lun(): "the individual field arguments should be set." ) - request = lun.GetLunRequest(request) + request = provisioning.GetProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2060,7 +4850,7 @@ async def sample_get_lun(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_lun, + self._client._transport.get_provisioning_config, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2082,16 +4872,19 @@ async def sample_get_lun(): # Done; return the response. return response - async def list_luns( + async def create_provisioning_config( self, - request: Optional[Union[lun.ListLunsRequest, dict]] = None, + request: Optional[ + Union[provisioning.CreateProvisioningConfigRequest, dict] + ] = None, *, parent: Optional[str] = None, + provisioning_config: Optional[provisioning.ProvisioningConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLunsAsyncPager: - r"""List storage volume luns for given storage volume. + ) -> provisioning.ProvisioningConfig: + r"""Create new ProvisioningConfig. .. 
code-block:: python @@ -2104,33 +4897,39 @@ async def list_luns( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_list_luns(): + async def sample_create_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListLunsRequest( + request = bare_metal_solution_v2.CreateProvisioningConfigRequest( parent="parent_value", ) # Make the request - page_result = client.list_luns(request=request) + response = await client.create_provisioning_config(request=request) # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListLunsRequest, dict]]): - The request object. Message for requesting a list of - storage volume luns. + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest, dict]]): + The request object. Request for CreateProvisioningConfig. parent (:class:`str`): - Required. Parent value for - ListLunsRequest. + Required. The parent project and + location containing the + ProvisioningConfig. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + provisioning_config (:class:`google.cloud.bare_metal_solution_v2.types.ProvisioningConfig`): + Required. The ProvisioningConfig to + create. + + This corresponds to the ``provisioning_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2138,35 +4937,32 @@ async def sample_list_luns(): sent along with the request as metadata. 
Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsAsyncPager: - Response message containing the list - of storage volume luns. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: + A provisioning configuration. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, provisioning_config]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = lun.ListLunsRequest(request) + request = provisioning.CreateProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if provisioning_config is not None: + request.provisioning_config = provisioning_config # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_luns, + self._client._transport.create_provisioning_config, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2185,28 +4981,22 @@ async def sample_list_luns(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLunsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. 
return response - async def get_nfs_share( + async def update_provisioning_config( self, - request: Optional[Union[nfs_share.GetNfsShareRequest, dict]] = None, + request: Optional[ + Union[provisioning.UpdateProvisioningConfigRequest, dict] + ] = None, *, - name: Optional[str] = None, + provisioning_config: Optional[provisioning.ProvisioningConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> nfs_share.NfsShare: - r"""Get details of a single NFS share. + ) -> provisioning.ProvisioningConfig: + r"""Update existing ProvisioningConfig. .. code-block:: python @@ -2219,28 +5009,36 @@ async def get_nfs_share( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_get_nfs_share(): + async def sample_update_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetNfsShareRequest( - name="name_value", + request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( ) # Make the request - response = await client.get_nfs_share(request=request) + response = await client.update_provisioning_config(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest, dict]]): - The request object. Message for requesting NFS share - information. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest, dict]]): + The request object. Message for updating a + ProvisioningConfig. + provisioning_config (:class:`google.cloud.bare_metal_solution_v2.types.ProvisioningConfig`): + Required. 
The ProvisioningConfig to + update. + + This corresponds to the ``provisioning_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2250,30 +5048,32 @@ async def sample_get_nfs_share(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.NfsShare: - An NFS share. + google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: + A provisioning configuration. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([provisioning_config, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = nfs_share.GetNfsShareRequest(request) + request = provisioning.UpdateProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if provisioning_config is not None: + request.provisioning_config = provisioning_config + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_nfs_share, + self._client._transport.update_provisioning_config, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2281,7 +5081,9 @@ async def sample_get_nfs_share(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("provisioning_config.name", request.provisioning_config.name),) + ), ) # Send the request. @@ -2295,16 +5097,19 @@ async def sample_get_nfs_share(): # Done; return the response. return response - async def list_nfs_shares( + async def rename_network( self, - request: Optional[Union[nfs_share.ListNfsSharesRequest, dict]] = None, + request: Optional[Union[network.RenameNetworkRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, + new_network_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNfsSharesAsyncPager: - r"""List NFS shares. + ) -> network.Network: + r"""RenameNetwork sets a new name for a network. + Use with caution, previous names become immediately + invalidated. .. 
code-block:: python @@ -2317,31 +5122,37 @@ async def list_nfs_shares( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_list_nfs_shares(): + async def sample_rename_network(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListNfsSharesRequest( - parent="parent_value", + request = bare_metal_solution_v2.RenameNetworkRequest( + name="name_value", + new_network_id="new_network_id_value", ) # Make the request - page_result = client.list_nfs_shares(request=request) + response = await client.rename_network(request=request) # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest, dict]]): - The request object. Message for requesting a list of NFS - shares. - parent (:class:`str`): - Required. Parent value for - ListNfsSharesRequest. + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest, dict]]): + The request object. Message requesting rename of a + server. + name (:class:`str`): + Required. The ``name`` field is used to identify the + network. Format: + projects/{project}/locations/{location}/networks/{network} - This corresponds to the ``parent`` field + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_network_id (:class:`str`): + Required. The new ``id`` of the network. + This corresponds to the ``new_network_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2351,35 +5162,32 @@ async def sample_list_nfs_shares(): sent along with the request as metadata. 
Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesAsyncPager: - Response message containing the list - of NFS shares. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.Network: + A Network. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name, new_network_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = nfs_share.ListNfsSharesRequest(request) + request = network.RenameNetworkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name + if new_network_id is not None: + request.new_network_id = new_network_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_nfs_shares, + self._client._transport.rename_network, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2387,7 +5195,7 @@ async def sample_list_nfs_shares(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -2398,29 +5206,20 @@ async def sample_list_nfs_shares(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListNfsSharesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - async def update_nfs_share( + async def list_os_images( self, - request: Optional[Union[gcb_nfs_share.UpdateNfsShareRequest, dict]] = None, + request: Optional[Union[osimage.ListOSImagesRequest, dict]] = None, *, - nfs_share: Optional[gcb_nfs_share.NfsShare] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Update details of a single NFS share. + ) -> pagers.ListOSImagesAsyncPager: + r"""Retrieves the list of OS images which are currently + approved. .. code-block:: python @@ -2433,43 +5232,31 @@ async def update_nfs_share( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - async def sample_update_nfs_share(): + async def sample_list_os_images(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNfsShareRequest( + request = bare_metal_solution_v2.ListOSImagesRequest( + parent="parent_value", ) # Make the request - operation = client.update_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + page_result = client.list_os_images(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest, dict]]): - The request object. Message requesting to updating a NFS - share. - nfs_share (:class:`google.cloud.bare_metal_solution_v2.types.NfsShare`): - Required. The NFS share to update. 
- - The ``name`` field is used to identify the NFS share to - update. Format: - projects/{project}/locations/{location}/nfsShares/{nfs_share} - - This corresponds to the ``nfs_share`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. The only currently - supported fields are: ``labels`` + request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest, dict]]): + The request object. Request for getting all available OS + images. + parent (:class:`str`): + Required. Parent value for + ListProvisioningQuotasRequest. - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2479,37 +5266,35 @@ async def sample_update_nfs_share(): sent along with the request as metadata. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` - An NFS share. + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesAsyncPager: + Request for getting all available OS + images. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([nfs_share, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
) - request = gcb_nfs_share.UpdateNfsShareRequest(request) + request = osimage.ListOSImagesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if nfs_share is not None: - request.nfs_share = nfs_share - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_nfs_share, + self._client._transport.list_os_images, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2517,9 +5302,7 @@ async def sample_update_nfs_share(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("nfs_share.name", request.nfs_share.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2530,12 +5313,13 @@ async def sample_update_nfs_share(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_nfs_share.NfsShare, - metadata_type=baremetalsolution.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOSImagesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py index 7f353618672b..48699cc07d6d 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py @@ -52,20 +52,28 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import pagers from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import baremetalsolution +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) +from google.cloud.bare_metal_solution_v2.types import baremetalsolution, common from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from google.cloud.bare_metal_solution_v2.types import ssh_key +from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from 
google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot from .transports.base import DEFAULT_CLIENT_INFO, BareMetalSolutionTransport from .transports.grpc import BareMetalSolutionGrpcTransport @@ -226,6 +234,72 @@ def parse_instance_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def instance_config_path( + project: str, + location: str, + instance_config: str, + ) -> str: + """Returns a fully-qualified instance_config string.""" + return "projects/{project}/locations/{location}/instanceConfigs/{instance_config}".format( + project=project, + location=location, + instance_config=instance_config, + ) + + @staticmethod + def parse_instance_config_path(path: str) -> Dict[str, str]: + """Parses a instance_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instanceConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_quota_path( + project: str, + location: str, + instance_quota: str, + ) -> str: + """Returns a fully-qualified instance_quota string.""" + return "projects/{project}/locations/{location}/instanceQuotas/{instance_quota}".format( + project=project, + location=location, + instance_quota=instance_quota, + ) + + @staticmethod + def parse_instance_quota_path(path: str) -> Dict[str, str]: + """Parses a instance_quota path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instanceQuotas/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def interconnect_attachment_path( + project: str, + region: str, + interconnect_attachment: str, + ) -> str: + """Returns a fully-qualified interconnect_attachment string.""" + return "projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format( + project=project, + region=region, + 
interconnect_attachment=interconnect_attachment, + ) + + @staticmethod + def parse_interconnect_attachment_path(path: str) -> Dict[str, str]: + """Parses a interconnect_attachment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/interconnectAttachments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def lun_path( project: str, @@ -272,6 +346,28 @@ def parse_network_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def network_config_path( + project: str, + location: str, + network_config: str, + ) -> str: + """Returns a fully-qualified network_config string.""" + return "projects/{project}/locations/{location}/networkConfigs/{network_config}".format( + project=project, + location=location, + network_config=network_config, + ) + + @staticmethod + def parse_network_config_path(path: str) -> Dict[str, str]: + """Parses a network_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/networkConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def nfs_share_path( project: str, @@ -294,6 +390,72 @@ def parse_nfs_share_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def os_image_path( + project: str, + location: str, + os_image: str, + ) -> str: + """Returns a fully-qualified os_image string.""" + return "projects/{project}/locations/{location}/osImages/{os_image}".format( + project=project, + location=location, + os_image=os_image, + ) + + @staticmethod + def parse_os_image_path(path: str) -> Dict[str, str]: + """Parses a os_image path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/osImages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def provisioning_config_path( + project: str, + location: str, + provisioning_config: str, + ) -> str: + """Returns a fully-qualified 
provisioning_config string.""" + return "projects/{project}/locations/{location}/provisioningConfigs/{provisioning_config}".format( + project=project, + location=location, + provisioning_config=provisioning_config, + ) + + @staticmethod + def parse_provisioning_config_path(path: str) -> Dict[str, str]: + """Parses a provisioning_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/provisioningConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def provisioning_quota_path( + project: str, + location: str, + provisioning_quota: str, + ) -> str: + """Returns a fully-qualified provisioning_quota string.""" + return "projects/{project}/locations/{location}/provisioningQuotas/{provisioning_quota}".format( + project=project, + location=location, + provisioning_quota=provisioning_quota, + ) + + @staticmethod + def parse_provisioning_quota_path(path: str) -> Dict[str, str]: + """Parses a provisioning_quota path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/provisioningQuotas/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def server_network_template_path( project: str, @@ -316,6 +478,28 @@ def parse_server_network_template_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def ssh_key_path( + project: str, + location: str, + ssh_key: str, + ) -> str: + """Returns a fully-qualified ssh_key string.""" + return "projects/{project}/locations/{location}/sshKeys/{ssh_key}".format( + project=project, + location=location, + ssh_key=ssh_key, + ) + + @staticmethod + def parse_ssh_key_path(path: str) -> Dict[str, str]: + """Parses a ssh_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/sshKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def volume_path( project: str, @@ -338,6 +522,52 @@ def parse_volume_path(path: str) -> 
Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def volume_config_path( + project: str, + location: str, + volume_config: str, + ) -> str: + """Returns a fully-qualified volume_config string.""" + return "projects/{project}/locations/{location}/volumeConfigs/{volume_config}".format( + project=project, + location=location, + volume_config=volume_config, + ) + + @staticmethod + def parse_volume_config_path(path: str) -> Dict[str, str]: + """Parses a volume_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/volumeConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def volume_snapshot_path( + project: str, + location: str, + volume: str, + snapshot: str, + ) -> str: + """Returns a fully-qualified volume_snapshot string.""" + return "projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot}".format( + project=project, + location=location, + volume=volume, + snapshot=snapshot, + ) + + @staticmethod + def parse_volume_snapshot_path(path: str) -> Dict[str, str]: + """Parses a volume_snapshot path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/volumes/(?P.+?)/snapshots/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -927,6 +1157,118 @@ def sample_update_instance(): # Done; return the response. return response + def rename_instance( + self, + request: Optional[Union[instance.RenameInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + new_instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance.Instance: + r"""RenameInstance sets a new name for an instance. + Use with caution, previous names become immediately + invalidated. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_rename_instance(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameInstanceRequest( + name="name_value", + new_instance_id="new_instance_id_value", + ) + + # Make the request + response = client.rename_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest, dict]): + The request object. Message requesting rename of a + server. + name (str): + Required. The ``name`` field is used to identify the + instance. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_instance_id (str): + Required. The new ``id`` of the instance. + This corresponds to the ``new_instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Instance: + A server. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a instance.RenameInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, instance.RenameInstanceRequest): + request = instance.RenameInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_instance_id is not None: + request.new_instance_id = new_instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rename_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def reset_instance( self, request: Optional[Union[instance.ResetInstanceRequest, dict]] = None, @@ -1271,17 +1613,19 @@ def sample_stop_instance(): # Done; return the response. 
return response - def detach_lun( + def enable_interactive_serial_console( self, - request: Optional[Union[gcb_instance.DetachLunRequest, dict]] = None, + request: Optional[ + Union[instance.EnableInteractiveSerialConsoleRequest, dict] + ] = None, *, - instance: Optional[str] = None, - lun: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Detach LUN from Instance. + r"""Enable the interactive serial console feature on an + instance. .. code-block:: python @@ -1294,18 +1638,17 @@ def detach_lun( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_detach_lun(): + def sample_enable_interactive_serial_console(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.DetachLunRequest( - instance="instance_value", - lun="lun_value", + request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( + name="name_value", ) # Make the request - operation = client.detach_lun(request=request) + operation = client.enable_interactive_serial_console(request=request) print("Waiting for operation to complete...") @@ -1315,17 +1658,12 @@ def sample_detach_lun(): print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.DetachLunRequest, dict]): - The request object. Message for detach specific LUN from - an Instance. - instance (str): - Required. Name of the instance. - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lun (str): - Required. Name of the Lun to detach. 
- This corresponds to the ``lun`` field + request (Union[google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest, dict]): + The request object. Message for enabling the interactive + serial console on an instance. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1339,14 +1677,2628 @@ def sample_detach_lun(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Instance` + :class:`google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleResponse` + Message for response of EnableInteractiveSerialConsole. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a instance.EnableInteractiveSerialConsoleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, instance.EnableInteractiveSerialConsoleRequest): + request = instance.EnableInteractiveSerialConsoleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.enable_interactive_serial_console + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.EnableInteractiveSerialConsoleResponse, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + def disable_interactive_serial_console( + self, + request: Optional[ + Union[instance.DisableInteractiveSerialConsoleRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Disable the interactive serial console feature on an + instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_disable_interactive_serial_console(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( + name="name_value", + ) + + # Make the request + operation = client.disable_interactive_serial_console(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest, dict]): + The request object. Message for disabling the interactive + serial console on an instance. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleResponse` + Message for response of DisableInteractiveSerialConsole. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a instance.DisableInteractiveSerialConsoleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, instance.DisableInteractiveSerialConsoleRequest): + request = instance.DisableInteractiveSerialConsoleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.disable_interactive_serial_console + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + instance.DisableInteractiveSerialConsoleResponse, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + def detach_lun( + self, + request: Optional[Union[gcb_instance.DetachLunRequest, dict]] = None, + *, + instance: Optional[str] = None, + lun: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Detach LUN from Instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_detach_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DetachLunRequest( + instance="instance_value", + lun="lun_value", + ) + + # Make the request + operation = client.detach_lun(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.DetachLunRequest, dict]): + The request object. Message for detach specific LUN from + an Instance. + instance (str): + Required. Name of the instance. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lun (str): + Required. Name of the Lun to detach. + This corresponds to the ``lun`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Instance` A server. """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, lun]) + has_flattened_params = any([instance, lun]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_instance.DetachLunRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_instance.DetachLunRequest): + request = gcb_instance.DetachLunRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if lun is not None: + request.lun = lun + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_lun] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcb_instance.Instance, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_ssh_keys( + self, + request: Optional[Union[ssh_key.ListSSHKeysRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSSHKeysPager: + r"""Lists the public SSH keys registered for the + specified project. These SSH keys are used only for the + interactive serial console feature. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_list_ssh_keys(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListSSHKeysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ssh_keys(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest, dict]): + The request object. Message for listing the public SSH + keys in a project. + parent (str): + Required. The parent containing the + SSH keys. Currently, the only valid + value for the location is "global". + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysPager: + Message for response of ListSSHKeys. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a ssh_key.ListSSHKeysRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, ssh_key.ListSSHKeysRequest): + request = ssh_key.ListSSHKeysRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_ssh_keys] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSSHKeysPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_ssh_key( + self, + request: Optional[Union[gcb_ssh_key.CreateSSHKeyRequest, dict]] = None, + *, + parent: Optional[str] = None, + ssh_key: Optional[gcb_ssh_key.SSHKey] = None, + ssh_key_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcb_ssh_key.SSHKey: + r"""Register a public SSH key in the specified project + for use with the interactive serial console feature. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_create_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateSSHKeyRequest( + parent="parent_value", + ssh_key_id="ssh_key_id_value", + ) + + # Make the request + response = client.create_ssh_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest, dict]): + The request object. Message for registering a public SSH + key in a project. + parent (str): + Required. The parent containing the + SSH keys. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ssh_key (google.cloud.bare_metal_solution_v2.types.SSHKey): + Required. The SSH key to register. + This corresponds to the ``ssh_key`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ssh_key_id (str): + Required. The ID to use for the key, which will become + the final component of the key's resource name. + + This value must match the regex: [a-zA-Z0-9@.-_]{1,64} + + This corresponds to the ``ssh_key_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.SSHKey: + An SSH key, used for authorizing with + the interactive serial console feature. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, ssh_key, ssh_key_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_ssh_key.CreateSSHKeyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_ssh_key.CreateSSHKeyRequest): + request = gcb_ssh_key.CreateSSHKeyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if ssh_key is not None: + request.ssh_key = ssh_key + if ssh_key_id is not None: + request.ssh_key_id = ssh_key_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_ssh_key] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_ssh_key( + self, + request: Optional[Union[ssh_key.DeleteSSHKeyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a public SSH key registered in the specified + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_delete_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteSSHKeyRequest( + name="name_value", + ) + + # Make the request + client.delete_ssh_key(request=request) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest, dict]): + The request object. Message for deleting an SSH key from + a project. + name (str): + Required. The name of the SSH key to + delete. Currently, the only valid value + for the location is "global". + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a ssh_key.DeleteSSHKeyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, ssh_key.DeleteSSHKeyRequest): + request = ssh_key.DeleteSSHKeyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_ssh_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_volumes( + self, + request: Optional[Union[volume.ListVolumesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVolumesPager: + r"""List storage volumes in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_list_volumes(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListVolumesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_volumes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ListVolumesRequest, dict]): + The request object. Message for requesting a list of + storage volumes. + parent (str): + Required. Parent value for + ListVolumesRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesPager: + Response message containing the list + of storage volumes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume.ListVolumesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, volume.ListVolumesRequest): + request = volume.ListVolumesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_volumes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVolumesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_volume( + self, + request: Optional[Union[volume.GetVolumeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.Volume: + r"""Get details of a single storage volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_get_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetVolumeRequest( + name="name_value", + ) + + # Make the request + response = client.get_volume(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.GetVolumeRequest, dict]): + The request object. Message for requesting storage volume + information. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Volume: + A storage volume. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume.GetVolumeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, volume.GetVolumeRequest): + request = volume.GetVolumeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_volume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_volume( + self, + request: Optional[Union[gcb_volume.UpdateVolumeRequest, dict]] = None, + *, + volume: Optional[gcb_volume.Volume] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update details of a single storage volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_update_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.UpdateVolumeRequest( + ) + + # Make the request + operation = client.update_volume(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest, dict]): + The request object. Message for updating a volume. + volume (google.cloud.bare_metal_solution_v2.types.Volume): + Required. The volume to update. + + The ``name`` field is used to identify the volume to + update. Format: + projects/{project}/locations/{location}/volumes/{volume} + + This corresponds to the ``volume`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + The only currently supported fields are: + + 'labels' + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Volume` + A storage volume. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([volume, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_volume.UpdateVolumeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_volume.UpdateVolumeRequest): + request = gcb_volume.UpdateVolumeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if volume is not None: + request.volume = volume + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_volume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("volume.name", request.volume.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcb_volume.Volume, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def rename_volume( + self, + request: Optional[Union[volume.RenameVolumeRequest, dict]] = None, + *, + name: Optional[str] = None, + new_volume_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.Volume: + r"""RenameVolume sets a new name for a volume. + Use with caution, previous names become immediately + invalidated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_rename_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameVolumeRequest( + name="name_value", + new_volume_id="new_volume_id_value", + ) + + # Make the request + response = client.rename_volume(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest, dict]): + The request object. Message requesting rename of a + server. + name (str): + Required. The ``name`` field is used to identify the + volume. Format: + projects/{project}/locations/{location}/volumes/{volume} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_volume_id (str): + Required. The new ``id`` of the volume. + This corresponds to the ``new_volume_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Volume: + A storage volume. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_volume_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume.RenameVolumeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, volume.RenameVolumeRequest): + request = volume.RenameVolumeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_volume_id is not None: + request.new_volume_id = new_volume_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rename_volume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def evict_volume( + self, + request: Optional[Union[volume.EvictVolumeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Skips volume's cooloff and deletes it now. + Volume must be in cooloff state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_evict_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictVolumeRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_volume(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest, dict]): + The request object. Request for skip volume cooloff and + delete it. + name (str): + Required. The name of the Volume. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume.EvictVolumeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, volume.EvictVolumeRequest): + request = volume.EvictVolumeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.evict_volume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + def resize_volume( + self, + request: Optional[Union[gcb_volume.ResizeVolumeRequest, dict]] = None, + *, + volume: Optional[str] = None, + size_gib: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Emergency Volume resize. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_resize_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ResizeVolumeRequest( + volume="volume_value", + ) + + # Make the request + operation = client.resize_volume(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest, dict]): + The request object. Request for emergency resize Volume. + volume (str): + Required. Volume to resize. + This corresponds to the ``volume`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + size_gib (int): + New Volume size, in GiB. 
+ This corresponds to the ``size_gib`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Volume` + A storage volume. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([volume, size_gib]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_volume.ResizeVolumeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_volume.ResizeVolumeRequest): + request = gcb_volume.ResizeVolumeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if volume is not None: + request.volume = volume + if size_gib is not None: + request.size_gib = size_gib + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize_volume] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("volume", request.volume),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcb_volume.Volume, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_networks( + self, + request: Optional[Union[network.ListNetworksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNetworksPager: + r"""List network in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_list_networks(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListNetworksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_networks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ListNetworksRequest, dict]): + The request object. Message for requesting a list of + networks. + parent (str): + Required. Parent value for + ListNetworksRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksPager: + Response message containing the list + of networks. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a network.ListNetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, network.ListNetworksRequest): + request = network.ListNetworksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_networks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListNetworksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_network_usage( + self, + request: Optional[Union[network.ListNetworkUsageRequest, dict]] = None, + *, + location: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.ListNetworkUsageResponse: + r"""List all Networks (and used IPs for each Network) in + the vendor account associated with the specified + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_list_network_usage(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListNetworkUsageRequest( + location="location_value", + ) + + # Make the request + response = client.list_network_usage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest, dict]): + The request object. Request to get networks with IPs. + location (str): + Required. Parent value (project and + location). + + This corresponds to the ``location`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse: + Response with Networks with IPs + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([location]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a network.ListNetworkUsageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, network.ListNetworkUsageRequest): + request = network.ListNetworkUsageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if location is not None: + request.location = location + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_network_usage] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_network( + self, + request: Optional[Union[network.GetNetworkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.Network: + r"""Get details of a single network. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_get_network(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetNetworkRequest( + name="name_value", + ) + + # Make the request + response = client.get_network(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.GetNetworkRequest, dict]): + The request object. Message for requesting network + information. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Network: + A Network. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a network.GetNetworkRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, network.GetNetworkRequest): + request = network.GetNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_network] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_network( + self, + request: Optional[Union[gcb_network.UpdateNetworkRequest, dict]] = None, + *, + network: Optional[gcb_network.Network] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update details of a single network. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_update_network(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.UpdateNetworkRequest( + ) + + # Make the request + operation = client.update_network(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest, dict]): + The request object. Message requesting to updating a + network. + network (google.cloud.bare_metal_solution_v2.types.Network): + Required. The network to update. + + The ``name`` field is used to identify the instance to + update. Format: + projects/{project}/locations/{location}/networks/{network} + + This corresponds to the ``network`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. The only currently + supported fields are: ``labels``, ``reservations``, + ``vrf.vlan_attachments`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.Network` + A Network. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([network, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_network.UpdateNetworkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_network.UpdateNetworkRequest): + request = gcb_network.UpdateNetworkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if network is not None: + request.network = network + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_network] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("network.name", request.network.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcb_network.Network, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def create_volume_snapshot( + self, + request: Optional[ + Union[gcb_volume_snapshot.CreateVolumeSnapshotRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + volume_snapshot: Optional[gcb_volume_snapshot.VolumeSnapshot] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcb_volume_snapshot.VolumeSnapshot: + r"""Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if + called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_create_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_volume_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest, dict]): + The request object. Message for creating a volume + snapshot. + parent (str): + Required. The volume to snapshot. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + volume_snapshot (google.cloud.bare_metal_solution_v2.types.VolumeSnapshot): + Required. The snapshot to create. 
+ This corresponds to the ``volume_snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: + A snapshot of a volume. Only boot + volumes can have snapshots. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, volume_snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_volume_snapshot.CreateVolumeSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_volume_snapshot.CreateVolumeSnapshotRequest): + request = gcb_volume_snapshot.CreateVolumeSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if volume_snapshot is not None: + request.volume_snapshot = volume_snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_volume_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def restore_volume_snapshot( + self, + request: Optional[ + Union[gcb_volume_snapshot.RestoreVolumeSnapshotRequest, dict] + ] = None, + *, + volume_snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Uses the specified snapshot to restore its parent volume. + Returns INVALID_ARGUMENT if called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_restore_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( + volume_snapshot="volume_snapshot_value", + ) + + # Make the request + operation = client.restore_volume_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest, dict]): + The request object. Message for restoring a volume + snapshot. + volume_snapshot (str): + Required. Name of the snapshot which + will be used to restore its parent + volume. 
+ + This corresponds to the ``volume_snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bare_metal_solution_v2.types.VolumeSnapshot` + A snapshot of a volume. Only boot volumes can have + snapshots. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([volume_snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcb_volume_snapshot.RestoreVolumeSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcb_volume_snapshot.RestoreVolumeSnapshotRequest): + request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if volume_snapshot is not None: + request.volume_snapshot = volume_snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_volume_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("volume_snapshot", request.volume_snapshot),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcb_volume_snapshot.VolumeSnapshot, + metadata_type=baremetalsolution.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_volume_snapshot( + self, + request: Optional[ + Union[volume_snapshot.DeleteVolumeSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a volume snapshot. Returns INVALID_ARGUMENT if called + for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_delete_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + client.delete_volume_snapshot(request=request) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest, dict]): + The request object. Message for deleting named Volume + snapshot. + name (str): + Required. The name of the snapshot to + delete. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume_snapshot.DeleteVolumeSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, volume_snapshot.DeleteVolumeSnapshotRequest): + request = volume_snapshot.DeleteVolumeSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_volume_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_volume_snapshot( + self, + request: Optional[Union[volume_snapshot.GetVolumeSnapshotRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume_snapshot.VolumeSnapshot: + r"""Returns the specified snapshot resource. Returns + INVALID_ARGUMENT if called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_get_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_volume_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest, dict]): + The request object. Message for requesting volume + snapshot information. + name (str): + Required. The name of the snapshot. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: + A snapshot of a volume. Only boot + volumes can have snapshots. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume_snapshot.GetVolumeSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, volume_snapshot.GetVolumeSnapshotRequest): + request = volume_snapshot.GetVolumeSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_volume_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_volume_snapshots( + self, + request: Optional[ + Union[volume_snapshot.ListVolumeSnapshotsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVolumeSnapshotsPager: + r"""Retrieves the list of snapshots for the specified + volume. 
Returns a response with an empty list of + snapshots if called for a non-boot volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_list_volume_snapshots(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListVolumeSnapshotsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_volume_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest, dict]): + The request object. Message for requesting a list of + volume snapshots. + parent (str): + Required. Parent value for + ListVolumesRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsPager: + Response message containing the list + of volume snapshots. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a volume_snapshot.ListVolumeSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, volume_snapshot.ListVolumeSnapshotsRequest): + request = volume_snapshot.ListVolumeSnapshotsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_volume_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVolumeSnapshotsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_lun( + self, + request: Optional[Union[lun.GetLunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> lun.Lun: + r"""Get details of a single storage logical unit + number(LUN). + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_get_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetLunRequest( + name="name_value", + ) + + # Make the request + response = client.get_lun(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.GetLunRequest, dict]): + The request object. Message for requesting storage lun + information. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.types.Lun: + A storage volume logical unit number + (LUN). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a lun.GetLunRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, lun.GetLunRequest): + request = lun.GetLunRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_lun] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_luns( + self, + request: Optional[Union[lun.ListLunsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLunsPager: + r"""List storage volume luns for given storage volume. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_list_luns(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListLunsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_luns(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.ListLunsRequest, dict]): + The request object. Message for requesting a list of + storage volume luns. + parent (str): + Required. Parent value for + ListLunsRequest. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsPager: + Response message containing the list + of storage volume luns. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a lun.ListLunsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, lun.ListLunsRequest): + request = lun.ListLunsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_luns] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLunsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def evict_lun( + self, + request: Optional[Union[lun.EvictLunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Skips lun's cooloff and deletes it now. + Lun must be in cooloff state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bare_metal_solution_v2 + + def sample_evict_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictLunRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_lun(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bare_metal_solution_v2.types.EvictLunRequest, dict]): + The request object. Request for skip lun cooloff and + delete it. + name (str): + Required. The name of the lun. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1354,26 +4306,24 @@ def sample_detach_lun(): ) # Minor optimization to avoid making a copy if the user passes - # in a gcb_instance.DetachLunRequest. + # in a lun.EvictLunRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, gcb_instance.DetachLunRequest): - request = gcb_instance.DetachLunRequest(request) + if not isinstance(request, lun.EvictLunRequest): + request = lun.EvictLunRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if instance is not None: - request.instance = instance - if lun is not None: - request.lun = lun + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.detach_lun] + rpc = self._transport._wrapped_methods[self._transport.evict_lun] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1388,23 +4338,23 @@ def sample_detach_lun(): response = operation.from_gapic( response, self._transport.operations_client, - gcb_instance.Instance, + empty_pb2.Empty, metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. 
return response - def list_volumes( + def get_nfs_share( self, - request: Optional[Union[volume.ListVolumesRequest, dict]] = None, + request: Optional[Union[nfs_share.GetNfsShareRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListVolumesPager: - r"""List storage volumes in a given project and location. + ) -> nfs_share.NfsShare: + r"""Get details of a single NFS share. .. code-block:: python @@ -1417,31 +4367,28 @@ def list_volumes( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_list_volumes(): + def sample_get_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumesRequest( - parent="parent_value", + request = bare_metal_solution_v2.GetNfsShareRequest( + name="name_value", ) # Make the request - page_result = client.list_volumes(request=request) + response = client.get_nfs_share(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListVolumesRequest, dict]): - The request object. Message for requesting a list of - storage volumes. - parent (str): - Required. Parent value for - ListVolumesRequest. - - This corresponds to the ``parent`` field + request (Union[google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest, dict]): + The request object. Message for requesting NFS share + information. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1451,18 +4398,13 @@ def sample_list_volumes(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesPager: - Response message containing the list - of storage volumes. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.NfsShare: + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1470,24 +4412,24 @@ def sample_list_volumes(): ) # Minor optimization to avoid making a copy if the user passes - # in a volume.ListVolumesRequest. + # in a nfs_share.GetNfsShareRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, volume.ListVolumesRequest): - request = volume.ListVolumesRequest(request) + if not isinstance(request, nfs_share.GetNfsShareRequest): + request = nfs_share.GetNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_volumes] + rpc = self._transport._wrapped_methods[self._transport.get_nfs_share] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1498,28 +4440,19 @@ def sample_list_volumes(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListVolumesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - def get_volume( + def list_nfs_shares( self, - request: Optional[Union[volume.GetVolumeRequest, dict]] = None, + request: Optional[Union[nfs_share.ListNfsSharesRequest, dict]] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> volume.Volume: - r"""Get details of a single storage volume. + ) -> pagers.ListNfsSharesPager: + r"""List NFS shares. .. code-block:: python @@ -1532,28 +4465,31 @@ def get_volume( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_get_volume(): + def sample_list_nfs_shares(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeRequest( - name="name_value", + request = bare_metal_solution_v2.ListNfsSharesRequest( + parent="parent_value", ) # Make the request - response = client.get_volume(request=request) + page_result = client.list_nfs_shares(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetVolumeRequest, dict]): - The request object. Message for requesting storage volume - information. - name (str): - Required. 
Name of the resource. - This corresponds to the ``name`` field + request (Union[google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest, dict]): + The request object. Message for requesting a list of NFS + shares. + parent (str): + Required. Parent value for + ListNfsSharesRequest. + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1563,13 +4499,18 @@ def sample_get_volume(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.Volume: - A storage volume. + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesPager: + Response message containing the list + of NFS shares. + Iterating over this object will yield + results and resolve additional pages + automatically. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1577,24 +4518,24 @@ def sample_get_volume(): ) # Minor optimization to avoid making a copy if the user passes - # in a volume.GetVolumeRequest. + # in a nfs_share.ListNfsSharesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, volume.GetVolumeRequest): - request = volume.GetVolumeRequest(request) + if not isinstance(request, nfs_share.ListNfsSharesRequest): + request = nfs_share.ListNfsSharesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_volume] + rpc = self._transport._wrapped_methods[self._transport.list_nfs_shares] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -1605,20 +4546,29 @@ def sample_get_volume(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNfsSharesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + # Done; return the response. return response - def update_volume( + def update_nfs_share( self, - request: Optional[Union[gcb_volume.UpdateVolumeRequest, dict]] = None, + request: Optional[Union[gcb_nfs_share.UpdateNfsShareRequest, dict]] = None, *, - volume: Optional[gcb_volume.Volume] = None, + nfs_share: Optional[gcb_nfs_share.NfsShare] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Update details of a single storage volume. + r"""Update details of a single NFS share. .. 
code-block:: python @@ -1631,16 +4581,16 @@ def update_volume( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_update_volume(): + def sample_update_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateVolumeRequest( + request = bare_metal_solution_v2.UpdateNfsShareRequest( ) # Make the request - operation = client.update_volume(request=request) + operation = client.update_nfs_share(request=request) print("Waiting for operation to complete...") @@ -1650,24 +4600,22 @@ def sample_update_volume(): print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest, dict]): - The request object. Message for updating a volume. - volume (google.cloud.bare_metal_solution_v2.types.Volume): - Required. The volume to update. + request (Union[google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest, dict]): + The request object. Message requesting to updating an NFS + share. + nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): + Required. The NFS share to update. - The ``name`` field is used to identify the volume to + The ``name`` field is used to identify the NFS share to update. Format: - projects/{project}/locations/{location}/volumes/{volume} + projects/{project}/locations/{location}/nfsShares/{nfs_share} - This corresponds to the ``volume`` field + This corresponds to the ``nfs_share`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): The list of fields to update. 
The only currently - supported fields are: ``snapshot_auto_delete_behavior`` - ``snapshot_schedule_policy_name`` 'labels' - 'snapshot_enabled' - 'snapshot_reservation_detail.reserved_space_percent' + supported fields are: ``labels`` ``allowed_clients`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1683,14 +4631,14 @@ def sample_update_volume(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. + :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, update_mask]) + has_flattened_params = any([nfs_share, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1698,27 +4646,27 @@ def sample_update_volume(): ) # Minor optimization to avoid making a copy if the user passes - # in a gcb_volume.UpdateVolumeRequest. + # in a gcb_nfs_share.UpdateNfsShareRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, gcb_volume.UpdateVolumeRequest): - request = gcb_volume.UpdateVolumeRequest(request) + if not isinstance(request, gcb_nfs_share.UpdateNfsShareRequest): + request = gcb_nfs_share.UpdateNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if volume is not None: - request.volume = volume + if nfs_share is not None: + request.nfs_share = nfs_share if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_volume] + rpc = self._transport._wrapped_methods[self._transport.update_nfs_share] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("volume.name", request.volume.name),) + (("nfs_share.name", request.nfs_share.name),) ), ) @@ -1734,24 +4682,24 @@ def sample_update_volume(): response = operation.from_gapic( response, self._transport.operations_client, - gcb_volume.Volume, + gcb_nfs_share.NfsShare, metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. return response - def resize_volume( + def create_nfs_share( self, - request: Optional[Union[gcb_volume.ResizeVolumeRequest, dict]] = None, + request: Optional[Union[gcb_nfs_share.CreateNfsShareRequest, dict]] = None, *, - volume: Optional[str] = None, - size_gib: Optional[int] = None, + parent: Optional[str] = None, + nfs_share: Optional[gcb_nfs_share.NfsShare] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Emergency Volume resize. + r"""Create an NFS share. .. 
code-block:: python @@ -1764,17 +4712,17 @@ def resize_volume( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_resize_volume(): + def sample_create_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ResizeVolumeRequest( - volume="volume_value", + request = bare_metal_solution_v2.CreateNfsShareRequest( + parent="parent_value", ) # Make the request - operation = client.resize_volume(request=request) + operation = client.create_nfs_share(request=request) print("Waiting for operation to complete...") @@ -1784,16 +4732,18 @@ def sample_resize_volume(): print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest, dict]): - The request object. Request for emergency resize Volume. - volume (str): - Required. Volume to resize. - This corresponds to the ``volume`` field + request (Union[google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest, dict]): + The request object. Message for creating an NFS share. + parent (str): + Required. The parent project and + location. + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - size_gib (int): - New Volume size, in GiB. - This corresponds to the ``size_gib`` field + nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): + Required. The NfsShare to create. + This corresponds to the ``nfs_share`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1807,14 +4757,14 @@ def sample_resize_volume(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. 
+ :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, size_gib]) + has_flattened_params = any([parent, nfs_share]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1822,26 +4772,26 @@ def sample_resize_volume(): ) # Minor optimization to avoid making a copy if the user passes - # in a gcb_volume.ResizeVolumeRequest. + # in a gcb_nfs_share.CreateNfsShareRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, gcb_volume.ResizeVolumeRequest): - request = gcb_volume.ResizeVolumeRequest(request) + if not isinstance(request, gcb_nfs_share.CreateNfsShareRequest): + request = gcb_nfs_share.CreateNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if volume is not None: - request.volume = volume - if size_gib is not None: - request.size_gib = size_gib + if parent is not None: + request.parent = parent + if nfs_share is not None: + request.nfs_share = nfs_share # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resize_volume] + rpc = self._transport._wrapped_methods[self._transport.create_nfs_share] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("volume", request.volume),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. 
@@ -1856,23 +4806,26 @@ def sample_resize_volume(): response = operation.from_gapic( response, self._transport.operations_client, - gcb_volume.Volume, + gcb_nfs_share.NfsShare, metadata_type=baremetalsolution.OperationMetadata, ) # Done; return the response. return response - def list_networks( + def rename_nfs_share( self, - request: Optional[Union[network.ListNetworksRequest, dict]] = None, + request: Optional[Union[nfs_share.RenameNfsShareRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, + new_nfsshare_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNetworksPager: - r"""List network in a given project and location. + ) -> nfs_share.NfsShare: + r"""RenameNfsShare sets a new name for an nfsshare. + Use with caution, previous names become immediately + invalidated. .. code-block:: python @@ -1885,31 +4838,37 @@ def list_networks( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_list_networks(): + def sample_rename_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworksRequest( - parent="parent_value", + request = bare_metal_solution_v2.RenameNfsShareRequest( + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", ) # Make the request - page_result = client.list_networks(request=request) + response = client.rename_nfs_share(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListNetworksRequest, dict]): - The request object. Message for requesting a list of - networks. - parent (str): - Required. Parent value for - ListNetworksRequest. 
+ request (Union[google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest, dict]): + The request object. Message requesting rename of a + server. + name (str): + Required. The ``name`` field is used to identify the + nfsshare. Format: + projects/{project}/locations/{location}/nfsshares/{nfsshare} - This corresponds to the ``parent`` field + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_nfsshare_id (str): + Required. The new ``id`` of the nfsshare. + This corresponds to the ``new_nfsshare_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1919,18 +4878,13 @@ def sample_list_networks(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksPager: - Response message containing the list - of networks. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.NfsShare: + An NFS share. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name, new_nfsshare_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1938,24 +4892,26 @@ def sample_list_networks(): ) # Minor optimization to avoid making a copy if the user passes - # in a network.ListNetworksRequest. + # in a nfs_share.RenameNfsShareRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, network.ListNetworksRequest): - request = network.ListNetworksRequest(request) + if not isinstance(request, nfs_share.RenameNfsShareRequest): + request = nfs_share.RenameNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent + if name is not None: + request.name = name + if new_nfsshare_id is not None: + request.new_nfsshare_id = new_nfsshare_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_networks] + rpc = self._transport._wrapped_methods[self._transport.rename_nfs_share] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -1966,30 +4922,20 @@ def sample_list_networks(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListNetworksPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - def list_network_usage( + def delete_nfs_share( self, - request: Optional[Union[network.ListNetworkUsageRequest, dict]] = None, + request: Optional[Union[nfs_share.DeleteNfsShareRequest, dict]] = None, *, - location: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> network.ListNetworkUsageResponse: - r"""List all Networks (and used IPs for each Network) in - the vendor account associated with the specified - project. + ) -> operation.Operation: + r"""Delete an NFS share. 
The underlying volume is + automatically deleted. .. code-block:: python @@ -2002,29 +4948,33 @@ def list_network_usage( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_list_network_usage(): + def sample_delete_nfs_share(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworkUsageRequest( - location="location_value", + request = bare_metal_solution_v2.DeleteNfsShareRequest( + name="name_value", ) # Make the request - response = client.list_network_usage(request=request) + operation = client.delete_nfs_share(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest, dict]): - The request object. Request to get networks with IPs. - location (str): - Required. Parent value (project and - location). + request (Union[google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest, dict]): + The request object. Message for deleting an NFS share. + name (str): + Required. The name of the NFS share + to delete. - This corresponds to the ``location`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2034,13 +4984,25 @@ def sample_list_network_usage(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse: - Response with Networks with IPs + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([location]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2048,24 +5010,24 @@ def sample_list_network_usage(): ) # Minor optimization to avoid making a copy if the user passes - # in a network.ListNetworkUsageRequest. + # in a nfs_share.DeleteNfsShareRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, network.ListNetworkUsageRequest): - request = network.ListNetworkUsageRequest(request) + if not isinstance(request, nfs_share.DeleteNfsShareRequest): + request = nfs_share.DeleteNfsShareRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if location is not None: - request.location = location + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_network_usage] + rpc = self._transport._wrapped_methods[self._transport.delete_nfs_share] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -2076,19 +5038,30 @@ def sample_list_network_usage(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=baremetalsolution.OperationMetadata, + ) + # Done; return the response. return response - def get_network( + def list_provisioning_quotas( self, - request: Optional[Union[network.GetNetworkRequest, dict]] = None, + request: Optional[ + Union[provisioning.ListProvisioningQuotasRequest, dict] + ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> network.Network: - r"""Get details of a single network. + ) -> pagers.ListProvisioningQuotasPager: + r"""List the budget details to provision resources on a + given project. .. 
code-block:: python @@ -2101,28 +5074,31 @@ def get_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_get_network(): + def sample_list_provisioning_quotas(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetNetworkRequest( - name="name_value", + request = bare_metal_solution_v2.ListProvisioningQuotasRequest( + parent="parent_value", ) # Make the request - response = client.get_network(request=request) + page_result = client.list_provisioning_quotas(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetNetworkRequest, dict]): - The request object. Message for requesting network - information. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field + request (Union[google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest, dict]): + The request object. Message for requesting the list of + provisioning quotas. + parent (str): + Required. Parent value for + ListProvisioningQuotasRequest. + + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2132,13 +5108,18 @@ def sample_get_network(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.Network: - A Network. + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasPager: + Response message for the list of + provisioning quotas. + Iterating over this object will yield + results and resolve additional pages + automatically. + """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2146,24 +5127,24 @@ def sample_get_network(): ) # Minor optimization to avoid making a copy if the user passes - # in a network.GetNetworkRequest. + # in a provisioning.ListProvisioningQuotasRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, network.GetNetworkRequest): - request = network.GetNetworkRequest(request) + if not isinstance(request, provisioning.ListProvisioningQuotasRequest): + request = provisioning.ListProvisioningQuotasRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_network] + rpc = self._transport._wrapped_methods[self._transport.list_provisioning_quotas] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2174,20 +5155,32 @@ def sample_get_network(): metadata=metadata, ) + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListProvisioningQuotasPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + # Done; return the response. 
return response - def update_network( + def submit_provisioning_config( self, - request: Optional[Union[gcb_network.UpdateNetworkRequest, dict]] = None, + request: Optional[ + Union[provisioning.SubmitProvisioningConfigRequest, dict] + ] = None, *, - network: Optional[gcb_network.Network] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, + provisioning_config: Optional[provisioning.ProvisioningConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Update details of a single network. + ) -> provisioning.SubmitProvisioningConfigResponse: + r"""Submit a provisiong configuration for a given + project. .. code-block:: python @@ -2200,43 +5193,37 @@ def update_network( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_update_network(): + def sample_submit_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNetworkRequest( + request = bare_metal_solution_v2.SubmitProvisioningConfigRequest( + parent="parent_value", ) # Make the request - operation = client.update_network(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + response = client.submit_provisioning_config(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest, dict]): - The request object. Message requesting to updating a - network. - network (google.cloud.bare_metal_solution_v2.types.Network): - Required. The network to update. - - The ``name`` field is used to identify the instance to - update. 
Format: - projects/{project}/locations/{location}/networks/{network} + request (Union[google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest, dict]): + The request object. Request for SubmitProvisioningConfig. + parent (str): + Required. The parent project and + location containing the + ProvisioningConfig. - This corresponds to the ``network`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. The only currently - supported fields are: ``labels``, ``reservations`` + should not be set. + provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + Required. The ProvisioningConfig to + create. - This corresponds to the ``update_mask`` field + This corresponds to the ``provisioning_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2246,18 +5233,15 @@ def sample_update_network(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Network` - A Network. + google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse: + Response for + SubmitProvisioningConfig. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([network, update_mask]) + has_flattened_params = any([parent, provisioning_config]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2265,28 +5249,28 @@ def sample_update_network(): ) # Minor optimization to avoid making a copy if the user passes - # in a gcb_network.UpdateNetworkRequest. + # in a provisioning.SubmitProvisioningConfigRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, gcb_network.UpdateNetworkRequest): - request = gcb_network.UpdateNetworkRequest(request) + if not isinstance(request, provisioning.SubmitProvisioningConfigRequest): + request = provisioning.SubmitProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if network is not None: - request.network = network - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent + if provisioning_config is not None: + request.provisioning_config = provisioning_config # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_network] + rpc = self._transport._wrapped_methods[ + self._transport.submit_provisioning_config + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("network.name", request.network.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2297,28 +5281,21 @@ def sample_update_network(): metadata=metadata, ) - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - gcb_network.Network, - metadata_type=baremetalsolution.OperationMetadata, - ) - # Done; return the response. return response - def get_lun( + def get_provisioning_config( self, - request: Optional[Union[lun.GetLunRequest, dict]] = None, + request: Optional[ + Union[provisioning.GetProvisioningConfigRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> lun.Lun: - r"""Get details of a single storage logical unit - number(LUN). + ) -> provisioning.ProvisioningConfig: + r"""Get ProvisioningConfig by name. .. code-block:: python @@ -2331,27 +5308,28 @@ def get_lun( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_get_lun(): + def sample_get_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetLunRequest( + request = bare_metal_solution_v2.GetProvisioningConfigRequest( name="name_value", ) # Make the request - response = client.get_lun(request=request) + response = client.get_provisioning_config(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetLunRequest, dict]): - The request object. Message for requesting storage lun - information. + request (Union[google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest, dict]): + The request object. Request for GetProvisioningConfig. name (str): - Required. Name of the resource. + Required. Name of the + ProvisioningConfig. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -2362,10 +5340,8 @@ def sample_get_lun(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.Lun: - A storage volume logical unit number - (LUN). - + google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: + A provisioning configuration. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2378,11 +5354,11 @@ def sample_get_lun(): ) # Minor optimization to avoid making a copy if the user passes - # in a lun.GetLunRequest. + # in a provisioning.GetProvisioningConfigRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, lun.GetLunRequest): - request = lun.GetLunRequest(request) + if not isinstance(request, provisioning.GetProvisioningConfigRequest): + request = provisioning.GetProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2390,7 +5366,7 @@ def sample_get_lun(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_lun] + rpc = self._transport._wrapped_methods[self._transport.get_provisioning_config] # Certain fields should be provided within the metadata header; # add these here. @@ -2409,16 +5385,19 @@ def sample_get_lun(): # Done; return the response. 
return response - def list_luns( + def create_provisioning_config( self, - request: Optional[Union[lun.ListLunsRequest, dict]] = None, + request: Optional[ + Union[provisioning.CreateProvisioningConfigRequest, dict] + ] = None, *, parent: Optional[str] = None, + provisioning_config: Optional[provisioning.ProvisioningConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListLunsPager: - r"""List storage volume luns for given storage volume. + ) -> provisioning.ProvisioningConfig: + r"""Create new ProvisioningConfig. .. code-block:: python @@ -2431,33 +5410,39 @@ def list_luns( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_list_luns(): + def sample_create_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListLunsRequest( + request = bare_metal_solution_v2.CreateProvisioningConfigRequest( parent="parent_value", ) # Make the request - page_result = client.list_luns(request=request) + response = client.create_provisioning_config(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListLunsRequest, dict]): - The request object. Message for requesting a list of - storage volume luns. + request (Union[google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest, dict]): + The request object. Request for CreateProvisioningConfig. parent (str): - Required. Parent value for - ListLunsRequest. + Required. The parent project and + location containing the + ProvisioningConfig. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
+ provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + Required. The ProvisioningConfig to + create. + + This corresponds to the ``provisioning_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2465,18 +5450,13 @@ def sample_list_luns(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsPager: - Response message containing the list - of storage volume luns. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: + A provisioning configuration. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, provisioning_config]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2484,19 +5464,23 @@ def sample_list_luns(): ) # Minor optimization to avoid making a copy if the user passes - # in a lun.ListLunsRequest. + # in a provisioning.CreateProvisioningConfigRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, lun.ListLunsRequest): - request = lun.ListLunsRequest(request) + if not isinstance(request, provisioning.CreateProvisioningConfigRequest): + request = provisioning.CreateProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
if parent is not None: request.parent = parent + if provisioning_config is not None: + request.provisioning_config = provisioning_config # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_luns] + rpc = self._transport._wrapped_methods[ + self._transport.create_provisioning_config + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2512,28 +5496,22 @@ def sample_list_luns(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListLunsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - def get_nfs_share( + def update_provisioning_config( self, - request: Optional[Union[nfs_share.GetNfsShareRequest, dict]] = None, + request: Optional[ + Union[provisioning.UpdateProvisioningConfigRequest, dict] + ] = None, *, - name: Optional[str] = None, + provisioning_config: Optional[provisioning.ProvisioningConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> nfs_share.NfsShare: - r"""Get details of a single NFS share. + ) -> provisioning.ProvisioningConfig: + r"""Update existing ProvisioningConfig. .. 
code-block:: python @@ -2546,28 +5524,36 @@ def get_nfs_share( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_get_nfs_share(): + def sample_update_provisioning_config(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.GetNfsShareRequest( - name="name_value", + request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( ) # Make the request - response = client.get_nfs_share(request=request) + response = client.update_provisioning_config(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest, dict]): - The request object. Message for requesting NFS share - information. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field + request (Union[google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest, dict]): + The request object. Message for updating a + ProvisioningConfig. + provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + Required. The ProvisioningConfig to + update. + + This corresponds to the ``provisioning_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2577,13 +5563,13 @@ def sample_get_nfs_share(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.types.NfsShare: - An NFS share. + google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: + A provisioning configuration. 
""" # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + has_flattened_params = any([provisioning_config, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2591,24 +5577,30 @@ def sample_get_nfs_share(): ) # Minor optimization to avoid making a copy if the user passes - # in a nfs_share.GetNfsShareRequest. + # in a provisioning.UpdateProvisioningConfigRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, nfs_share.GetNfsShareRequest): - request = nfs_share.GetNfsShareRequest(request) + if not isinstance(request, provisioning.UpdateProvisioningConfigRequest): + request = provisioning.UpdateProvisioningConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: - request.name = name + if provisioning_config is not None: + request.provisioning_config = provisioning_config + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_nfs_share] + rpc = self._transport._wrapped_methods[ + self._transport.update_provisioning_config + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata( + (("provisioning_config.name", request.provisioning_config.name),) + ), ) # Send the request. @@ -2622,16 +5614,19 @@ def sample_get_nfs_share(): # Done; return the response. 
return response - def list_nfs_shares( + def rename_network( self, - request: Optional[Union[nfs_share.ListNfsSharesRequest, dict]] = None, + request: Optional[Union[network.RenameNetworkRequest, dict]] = None, *, - parent: Optional[str] = None, + name: Optional[str] = None, + new_network_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListNfsSharesPager: - r"""List NFS shares. + ) -> network.Network: + r"""RenameNetwork sets a new name for a network. + Use with caution, previous names become immediately + invalidated. .. code-block:: python @@ -2644,31 +5639,37 @@ def list_nfs_shares( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_list_nfs_shares(): + def sample_rename_network(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.ListNfsSharesRequest( - parent="parent_value", + request = bare_metal_solution_v2.RenameNetworkRequest( + name="name_value", + new_network_id="new_network_id_value", ) # Make the request - page_result = client.list_nfs_shares(request=request) + response = client.rename_network(request=request) # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest, dict]): - The request object. Message for requesting a list of NFS - shares. - parent (str): - Required. Parent value for - ListNfsSharesRequest. + request (Union[google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest, dict]): + The request object. Message requesting rename of a + server. + name (str): + Required. The ``name`` field is used to identify the + network. 
Format: + projects/{project}/locations/{location}/networks/{network} - This corresponds to the ``parent`` field + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_network_id (str): + Required. The new ``id`` of the network. + This corresponds to the ``new_network_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2678,18 +5679,13 @@ def sample_list_nfs_shares(): sent along with the request as metadata. Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesPager: - Response message containing the list - of NFS shares. - Iterating over this object will yield - results and resolve additional pages - automatically. - + google.cloud.bare_metal_solution_v2.types.Network: + A Network. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([name, new_network_id]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2697,24 +5693,26 @@ def sample_list_nfs_shares(): ) # Minor optimization to avoid making a copy if the user passes - # in a nfs_share.ListNfsSharesRequest. + # in a network.RenameNetworkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, nfs_share.ListNfsSharesRequest): - request = nfs_share.ListNfsSharesRequest(request) + if not isinstance(request, network.RenameNetworkRequest): + request = network.RenameNetworkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: - request.parent = parent + if name is not None: + request.name = name + if new_network_id is not None: + request.new_network_id = new_network_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_nfs_shares] + rpc = self._transport._wrapped_methods[self._transport.rename_network] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. @@ -2725,29 +5723,20 @@ def sample_list_nfs_shares(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListNfsSharesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - # Done; return the response. return response - def update_nfs_share( + def list_os_images( self, - request: Optional[Union[gcb_nfs_share.UpdateNfsShareRequest, dict]] = None, + request: Optional[Union[osimage.ListOSImagesRequest, dict]] = None, *, - nfs_share: Optional[gcb_nfs_share.NfsShare] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Update details of a single NFS share. + ) -> pagers.ListOSImagesPager: + r"""Retrieves the list of OS images which are currently + approved. .. 
code-block:: python @@ -2760,43 +5749,31 @@ def update_nfs_share( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bare_metal_solution_v2 - def sample_update_nfs_share(): + def sample_list_os_images(): # Create a client client = bare_metal_solution_v2.BareMetalSolutionClient() # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNfsShareRequest( + request = bare_metal_solution_v2.ListOSImagesRequest( + parent="parent_value", ) # Make the request - operation = client.update_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + page_result = client.list_os_images(request=request) # Handle the response - print(response) + for response in page_result: + print(response) Args: - request (Union[google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest, dict]): - The request object. Message requesting to updating a NFS - share. - nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): - Required. The NFS share to update. - - The ``name`` field is used to identify the NFS share to - update. Format: - projects/{project}/locations/{location}/nfsShares/{nfs_share} - - This corresponds to the ``nfs_share`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. The only currently - supported fields are: ``labels`` + request (Union[google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest, dict]): + The request object. Request for getting all available OS + images. + parent (str): + Required. Parent value for + ListProvisioningQuotasRequest. - This corresponds to the ``update_mask`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2806,18 +5783,18 @@ def sample_update_nfs_share(): sent along with the request as metadata. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` - An NFS share. + google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesPager: + Request for getting all available OS + images. + Iterating over this object will yield + results and resolve additional pages + automatically. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([nfs_share, update_mask]) + has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2825,28 +5802,24 @@ def sample_update_nfs_share(): ) # Minor optimization to avoid making a copy if the user passes - # in a gcb_nfs_share.UpdateNfsShareRequest. + # in a osimage.ListOSImagesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, gcb_nfs_share.UpdateNfsShareRequest): - request = gcb_nfs_share.UpdateNfsShareRequest(request) + if not isinstance(request, osimage.ListOSImagesRequest): + request = osimage.ListOSImagesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if nfs_share is not None: - request.nfs_share = nfs_share - if update_mask is not None: - request.update_mask = update_mask + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_nfs_share] + rpc = self._transport._wrapped_methods[self._transport.list_os_images] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("nfs_share.name", request.nfs_share.name),) - ), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. @@ -2857,12 +5830,13 @@ def sample_update_nfs_share(): metadata=metadata, ) - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcb_nfs_share.NfsShare, - metadata_type=baremetalsolution.OperationMetadata, + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOSImagesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py index 649300d75c3f..49520241aafb 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py @@ -29,7 +29,11 @@ lun, network, nfs_share, + osimage, + provisioning, + ssh_key, volume, + volume_snapshot, ) @@ -161,6 +165,134 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListSSHKeysPager: + """A pager for iterating through ``list_ssh_keys`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` object, and + provides an ``__iter__`` method to iterate through its + ``ssh_keys`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSSHKeys`` requests and continue to iterate + through the ``ssh_keys`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., ssh_key.ListSSHKeysResponse], + request: ssh_key.ListSSHKeysRequest, + response: ssh_key.ListSSHKeysResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ssh_key.ListSSHKeysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[ssh_key.ListSSHKeysResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[ssh_key.SSHKey]: + for page in self.pages: + yield from page.ssh_keys + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSSHKeysAsyncPager: + """A pager for iterating through ``list_ssh_keys`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``ssh_keys`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSSHKeys`` requests and continue to iterate + through the ``ssh_keys`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[ssh_key.ListSSHKeysResponse]], + request: ssh_key.ListSSHKeysRequest, + response: ssh_key.ListSSHKeysResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest): + The initial request object. 
+ response (google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = ssh_key.ListSSHKeysRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[ssh_key.ListSSHKeysResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[ssh_key.SSHKey]: + async def async_generator(): + async for page in self.pages: + for response in page.ssh_keys: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListVolumesPager: """A pager for iterating through ``list_volumes`` requests. @@ -417,6 +549,134 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListVolumeSnapshotsPager: + """A pager for iterating through ``list_volume_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``volume_snapshots`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVolumeSnapshots`` requests and continue to iterate + through the ``volume_snapshots`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., volume_snapshot.ListVolumeSnapshotsResponse], + request: volume_snapshot.ListVolumeSnapshotsRequest, + response: volume_snapshot.ListVolumeSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = volume_snapshot.ListVolumeSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[volume_snapshot.ListVolumeSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[volume_snapshot.VolumeSnapshot]: + for page in self.pages: + yield from page.volume_snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVolumeSnapshotsAsyncPager: + """A pager for iterating through ``list_volume_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``volume_snapshots`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListVolumeSnapshots`` requests and continue to iterate + through the ``volume_snapshots`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[volume_snapshot.ListVolumeSnapshotsResponse]], + request: volume_snapshot.ListVolumeSnapshotsRequest, + response: volume_snapshot.ListVolumeSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = volume_snapshot.ListVolumeSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[volume_snapshot.ListVolumeSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[volume_snapshot.VolumeSnapshot]: + async def async_generator(): + async for page in self.pages: + for response in page.volume_snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListLunsPager: """A pager for iterating through ``list_luns`` requests. @@ -671,3 +931,259 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListProvisioningQuotasPager: + """A pager for iterating through ``list_provisioning_quotas`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` object, and + provides an ``__iter__`` method to iterate through its + ``provisioning_quotas`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListProvisioningQuotas`` requests and continue to iterate + through the ``provisioning_quotas`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., provisioning.ListProvisioningQuotasResponse], + request: provisioning.ListProvisioningQuotasRequest, + response: provisioning.ListProvisioningQuotasResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = provisioning.ListProvisioningQuotasRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[provisioning.ListProvisioningQuotasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[provisioning.ProvisioningQuota]: + for page in self.pages: + yield from page.provisioning_quotas + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListProvisioningQuotasAsyncPager: + """A pager for iterating through ``list_provisioning_quotas`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``provisioning_quotas`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListProvisioningQuotas`` requests and continue to iterate + through the ``provisioning_quotas`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[provisioning.ListProvisioningQuotasResponse]], + request: provisioning.ListProvisioningQuotasRequest, + response: provisioning.ListProvisioningQuotasResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = provisioning.ListProvisioningQuotasRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[provisioning.ListProvisioningQuotasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[provisioning.ProvisioningQuota]: + async def async_generator(): + async for page in self.pages: + for response in page.provisioning_quotas: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSImagesPager: + """A pager for iterating through ``list_os_images`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_images`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSImages`` requests and continue to iterate + through the ``os_images`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., osimage.ListOSImagesResponse], + request: osimage.ListOSImagesRequest, + response: osimage.ListOSImagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = osimage.ListOSImagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[osimage.ListOSImagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[osimage.OSImage]: + for page in self.pages: + yield from page.os_images + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSImagesAsyncPager: + """A pager for iterating through ``list_os_images`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_images`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSImages`` requests and continue to iterate + through the ``os_images`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[osimage.ListOSImagesResponse]], + request: osimage.ListOSImagesRequest, + response: osimage.ListOSImagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest): + The initial request object. + response (google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = osimage.ListOSImagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[osimage.ListOSImagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[osimage.OSImage]: + async def async_generator(): + async for page in self.pages: + for response in page.os_images: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py index dd17b5af9934..d53b78bf3ded 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py +++ 
b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py @@ -27,17 +27,25 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.cloud.bare_metal_solution_v2 import gapic_version as package_version from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from google.cloud.bare_metal_solution_v2.types import ssh_key +from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -149,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.rename_instance: gapic_v1.method.wrap_method( + self.rename_instance, + default_timeout=None, + client_info=client_info, + ), self.reset_instance: gapic_v1.method.wrap_method( self.reset_instance, default_timeout=None, @@ -164,11 +177,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, 
client_info=client_info, ), + self.enable_interactive_serial_console: gapic_v1.method.wrap_method( + self.enable_interactive_serial_console, + default_timeout=None, + client_info=client_info, + ), + self.disable_interactive_serial_console: gapic_v1.method.wrap_method( + self.disable_interactive_serial_console, + default_timeout=None, + client_info=client_info, + ), self.detach_lun: gapic_v1.method.wrap_method( self.detach_lun, default_timeout=None, client_info=client_info, ), + self.list_ssh_keys: gapic_v1.method.wrap_method( + self.list_ssh_keys, + default_timeout=None, + client_info=client_info, + ), + self.create_ssh_key: gapic_v1.method.wrap_method( + self.create_ssh_key, + default_timeout=None, + client_info=client_info, + ), + self.delete_ssh_key: gapic_v1.method.wrap_method( + self.delete_ssh_key, + default_timeout=None, + client_info=client_info, + ), self.list_volumes: gapic_v1.method.wrap_method( self.list_volumes, default_timeout=None, @@ -184,6 +222,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.rename_volume: gapic_v1.method.wrap_method( + self.rename_volume, + default_timeout=None, + client_info=client_info, + ), + self.evict_volume: gapic_v1.method.wrap_method( + self.evict_volume, + default_timeout=None, + client_info=client_info, + ), self.resize_volume: gapic_v1.method.wrap_method( self.resize_volume, default_timeout=None, @@ -209,6 +257,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_volume_snapshot: gapic_v1.method.wrap_method( + self.create_volume_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.restore_volume_snapshot: gapic_v1.method.wrap_method( + self.restore_volume_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.delete_volume_snapshot: gapic_v1.method.wrap_method( + self.delete_volume_snapshot, + default_timeout=None, + client_info=client_info, + ), + 
self.get_volume_snapshot: gapic_v1.method.wrap_method( + self.get_volume_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.list_volume_snapshots: gapic_v1.method.wrap_method( + self.list_volume_snapshots, + default_timeout=None, + client_info=client_info, + ), self.get_lun: gapic_v1.method.wrap_method( self.get_lun, default_timeout=None, @@ -219,6 +292,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.evict_lun: gapic_v1.method.wrap_method( + self.evict_lun, + default_timeout=None, + client_info=client_info, + ), self.get_nfs_share: gapic_v1.method.wrap_method( self.get_nfs_share, default_timeout=None, @@ -234,6 +312,56 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_nfs_share: gapic_v1.method.wrap_method( + self.create_nfs_share, + default_timeout=None, + client_info=client_info, + ), + self.rename_nfs_share: gapic_v1.method.wrap_method( + self.rename_nfs_share, + default_timeout=None, + client_info=client_info, + ), + self.delete_nfs_share: gapic_v1.method.wrap_method( + self.delete_nfs_share, + default_timeout=None, + client_info=client_info, + ), + self.list_provisioning_quotas: gapic_v1.method.wrap_method( + self.list_provisioning_quotas, + default_timeout=None, + client_info=client_info, + ), + self.submit_provisioning_config: gapic_v1.method.wrap_method( + self.submit_provisioning_config, + default_timeout=None, + client_info=client_info, + ), + self.get_provisioning_config: gapic_v1.method.wrap_method( + self.get_provisioning_config, + default_timeout=None, + client_info=client_info, + ), + self.create_provisioning_config: gapic_v1.method.wrap_method( + self.create_provisioning_config, + default_timeout=None, + client_info=client_info, + ), + self.update_provisioning_config: gapic_v1.method.wrap_method( + self.update_provisioning_config, + default_timeout=None, + client_info=client_info, + ), + 
self.rename_network: gapic_v1.method.wrap_method( + self.rename_network, + default_timeout=None, + client_info=client_info, + ), + self.list_os_images: gapic_v1.method.wrap_method( + self.list_os_images, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -279,6 +407,15 @@ def update_instance( ]: raise NotImplementedError() + @property + def rename_instance( + self, + ) -> Callable[ + [instance.RenameInstanceRequest], + Union[instance.Instance, Awaitable[instance.Instance]], + ]: + raise NotImplementedError() + @property def reset_instance( self, @@ -306,6 +443,24 @@ def stop_instance( ]: raise NotImplementedError() + @property + def enable_interactive_serial_console( + self, + ) -> Callable[ + [instance.EnableInteractiveSerialConsoleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def disable_interactive_serial_console( + self, + ) -> Callable[ + [instance.DisableInteractiveSerialConsoleRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def detach_lun( self, @@ -315,6 +470,33 @@ def detach_lun( ]: raise NotImplementedError() + @property + def list_ssh_keys( + self, + ) -> Callable[ + [ssh_key.ListSSHKeysRequest], + Union[ssh_key.ListSSHKeysResponse, Awaitable[ssh_key.ListSSHKeysResponse]], + ]: + raise NotImplementedError() + + @property + def create_ssh_key( + self, + ) -> Callable[ + [gcb_ssh_key.CreateSSHKeyRequest], + Union[gcb_ssh_key.SSHKey, Awaitable[gcb_ssh_key.SSHKey]], + ]: + raise NotImplementedError() + + @property + def delete_ssh_key( + self, + ) -> Callable[ + [ssh_key.DeleteSSHKeyRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def list_volumes( self, @@ -341,6 +523,23 @@ def update_volume( ]: raise NotImplementedError() + @property + def rename_volume( + self, + ) -> Callable[ + 
[volume.RenameVolumeRequest], Union[volume.Volume, Awaitable[volume.Volume]] + ]: + raise NotImplementedError() + + @property + def evict_volume( + self, + ) -> Callable[ + [volume.EvictVolumeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def resize_volume( self, @@ -388,6 +587,59 @@ def update_network( ]: raise NotImplementedError() + @property + def create_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.CreateVolumeSnapshotRequest], + Union[ + gcb_volume_snapshot.VolumeSnapshot, + Awaitable[gcb_volume_snapshot.VolumeSnapshot], + ], + ]: + raise NotImplementedError() + + @property + def restore_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_volume_snapshot( + self, + ) -> Callable[ + [volume_snapshot.DeleteVolumeSnapshotRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_volume_snapshot( + self, + ) -> Callable[ + [volume_snapshot.GetVolumeSnapshotRequest], + Union[ + volume_snapshot.VolumeSnapshot, Awaitable[volume_snapshot.VolumeSnapshot] + ], + ]: + raise NotImplementedError() + + @property + def list_volume_snapshots( + self, + ) -> Callable[ + [volume_snapshot.ListVolumeSnapshotsRequest], + Union[ + volume_snapshot.ListVolumeSnapshotsResponse, + Awaitable[volume_snapshot.ListVolumeSnapshotsResponse], + ], + ]: + raise NotImplementedError() + @property def get_lun( self, @@ -403,6 +655,15 @@ def list_luns( ]: raise NotImplementedError() + @property + def evict_lun( + self, + ) -> Callable[ + [lun.EvictLunRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_nfs_share( self, @@ -432,6 +693,108 @@ def update_nfs_share( ]: raise 
NotImplementedError() + @property + def create_nfs_share( + self, + ) -> Callable[ + [gcb_nfs_share.CreateNfsShareRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def rename_nfs_share( + self, + ) -> Callable[ + [nfs_share.RenameNfsShareRequest], + Union[nfs_share.NfsShare, Awaitable[nfs_share.NfsShare]], + ]: + raise NotImplementedError() + + @property + def delete_nfs_share( + self, + ) -> Callable[ + [nfs_share.DeleteNfsShareRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_provisioning_quotas( + self, + ) -> Callable[ + [provisioning.ListProvisioningQuotasRequest], + Union[ + provisioning.ListProvisioningQuotasResponse, + Awaitable[provisioning.ListProvisioningQuotasResponse], + ], + ]: + raise NotImplementedError() + + @property + def submit_provisioning_config( + self, + ) -> Callable[ + [provisioning.SubmitProvisioningConfigRequest], + Union[ + provisioning.SubmitProvisioningConfigResponse, + Awaitable[provisioning.SubmitProvisioningConfigResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_provisioning_config( + self, + ) -> Callable[ + [provisioning.GetProvisioningConfigRequest], + Union[ + provisioning.ProvisioningConfig, Awaitable[provisioning.ProvisioningConfig] + ], + ]: + raise NotImplementedError() + + @property + def create_provisioning_config( + self, + ) -> Callable[ + [provisioning.CreateProvisioningConfigRequest], + Union[ + provisioning.ProvisioningConfig, Awaitable[provisioning.ProvisioningConfig] + ], + ]: + raise NotImplementedError() + + @property + def update_provisioning_config( + self, + ) -> Callable[ + [provisioning.UpdateProvisioningConfigRequest], + Union[ + provisioning.ProvisioningConfig, Awaitable[provisioning.ProvisioningConfig] + ], + ]: + raise NotImplementedError() + + @property + def rename_network( + self, + ) -> Callable[ + 
[network.RenameNetworkRequest], + Union[network.Network, Awaitable[network.Network]], + ]: + raise NotImplementedError() + + @property + def list_os_images( + self, + ) -> Callable[ + [osimage.ListOSImagesRequest], + Union[osimage.ListOSImagesResponse, Awaitable[osimage.ListOSImagesResponse]], + ]: + raise NotImplementedError() + @property def get_location( self, diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py index 59e13c22fdd4..33826193e2b1 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py @@ -24,17 +24,25 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from google.cloud.bare_metal_solution_v2.types import ssh_key +from google.cloud.bare_metal_solution_v2.types import ssh_key as 
gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot from .base import DEFAULT_CLIENT_INFO, BareMetalSolutionTransport @@ -341,6 +349,34 @@ def update_instance( ) return self._stubs["update_instance"] + @property + def rename_instance( + self, + ) -> Callable[[instance.RenameInstanceRequest], instance.Instance]: + r"""Return a callable for the rename instance method over gRPC. + + RenameInstance sets a new name for an instance. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_instance" not in self._stubs: + self._stubs["rename_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameInstance", + request_serializer=instance.RenameInstanceRequest.serialize, + response_deserializer=instance.Instance.deserialize, + ) + return self._stubs["rename_instance"] + @property def reset_instance( self, @@ -421,6 +457,70 @@ def stop_instance( ) return self._stubs["stop_instance"] + @property + def enable_interactive_serial_console( + self, + ) -> Callable[ + [instance.EnableInteractiveSerialConsoleRequest], operations_pb2.Operation + ]: + r"""Return a callable for the enable interactive serial + console method over gRPC. + + Enable the interactive serial console feature on an + instance. + + Returns: + Callable[[~.EnableInteractiveSerialConsoleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_interactive_serial_console" not in self._stubs: + self._stubs[ + "enable_interactive_serial_console" + ] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/EnableInteractiveSerialConsole", + request_serializer=instance.EnableInteractiveSerialConsoleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_interactive_serial_console"] + + @property + def disable_interactive_serial_console( + self, + ) -> Callable[ + [instance.DisableInteractiveSerialConsoleRequest], operations_pb2.Operation + ]: + r"""Return a callable for the disable interactive serial + console method over gRPC. + + Disable the interactive serial console feature on an + instance. + + Returns: + Callable[[~.DisableInteractiveSerialConsoleRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "disable_interactive_serial_console" not in self._stubs: + self._stubs[ + "disable_interactive_serial_console" + ] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DisableInteractiveSerialConsole", + request_serializer=instance.DisableInteractiveSerialConsoleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_interactive_serial_console"] + @property def detach_lun( self, @@ -447,6 +547,88 @@ def detach_lun( ) return self._stubs["detach_lun"] + @property + def list_ssh_keys( + self, + ) -> Callable[[ssh_key.ListSSHKeysRequest], ssh_key.ListSSHKeysResponse]: + r"""Return a callable for the list ssh keys method over gRPC. + + Lists the public SSH keys registered for the + specified project. These SSH keys are used only for the + interactive serial console feature. + + Returns: + Callable[[~.ListSSHKeysRequest], + ~.ListSSHKeysResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_ssh_keys" not in self._stubs: + self._stubs["list_ssh_keys"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListSSHKeys", + request_serializer=ssh_key.ListSSHKeysRequest.serialize, + response_deserializer=ssh_key.ListSSHKeysResponse.deserialize, + ) + return self._stubs["list_ssh_keys"] + + @property + def create_ssh_key( + self, + ) -> Callable[[gcb_ssh_key.CreateSSHKeyRequest], gcb_ssh_key.SSHKey]: + r"""Return a callable for the create ssh key method over gRPC. + + Register a public SSH key in the specified project + for use with the interactive serial console feature. + + Returns: + Callable[[~.CreateSSHKeyRequest], + ~.SSHKey]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_ssh_key" not in self._stubs: + self._stubs["create_ssh_key"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateSSHKey", + request_serializer=gcb_ssh_key.CreateSSHKeyRequest.serialize, + response_deserializer=gcb_ssh_key.SSHKey.deserialize, + ) + return self._stubs["create_ssh_key"] + + @property + def delete_ssh_key( + self, + ) -> Callable[[ssh_key.DeleteSSHKeyRequest], empty_pb2.Empty]: + r"""Return a callable for the delete ssh key method over gRPC. + + Deletes a public SSH key registered in the specified + project. + + Returns: + Callable[[~.DeleteSSHKeyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_ssh_key" not in self._stubs: + self._stubs["delete_ssh_key"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteSSHKey", + request_serializer=ssh_key.DeleteSSHKeyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_ssh_key"] + @property def list_volumes( self, @@ -523,6 +705,59 @@ def update_volume( ) return self._stubs["update_volume"] + @property + def rename_volume(self) -> Callable[[volume.RenameVolumeRequest], volume.Volume]: + r"""Return a callable for the rename volume method over gRPC. + + RenameVolume sets a new name for a volume. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameVolumeRequest], + ~.Volume]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_volume" not in self._stubs: + self._stubs["rename_volume"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameVolume", + request_serializer=volume.RenameVolumeRequest.serialize, + response_deserializer=volume.Volume.deserialize, + ) + return self._stubs["rename_volume"] + + @property + def evict_volume( + self, + ) -> Callable[[volume.EvictVolumeRequest], operations_pb2.Operation]: + r"""Return a callable for the evict volume method over gRPC. + + Skips volume's cooloff and deletes it now. + Volume must be in cooloff state. + + Returns: + Callable[[~.EvictVolumeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "evict_volume" not in self._stubs: + self._stubs["evict_volume"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictVolume", + request_serializer=volume.EvictVolumeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["evict_volume"] + @property def resize_volume( self, @@ -653,6 +888,152 @@ def update_network( ) return self._stubs["update_network"] + @property + def create_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.CreateVolumeSnapshotRequest], + gcb_volume_snapshot.VolumeSnapshot, + ]: + r"""Return a callable for the create volume snapshot method over gRPC. + + Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if + called for a non-boot volume. 
+ + Returns: + Callable[[~.CreateVolumeSnapshotRequest], + ~.VolumeSnapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_volume_snapshot" not in self._stubs: + self._stubs["create_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateVolumeSnapshot", + request_serializer=gcb_volume_snapshot.CreateVolumeSnapshotRequest.serialize, + response_deserializer=gcb_volume_snapshot.VolumeSnapshot.deserialize, + ) + return self._stubs["create_volume_snapshot"] + + @property + def restore_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], operations_pb2.Operation + ]: + r"""Return a callable for the restore volume snapshot method over gRPC. + + Uses the specified snapshot to restore its parent volume. + Returns INVALID_ARGUMENT if called for a non-boot volume. + + Returns: + Callable[[~.RestoreVolumeSnapshotRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restore_volume_snapshot" not in self._stubs: + self._stubs["restore_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RestoreVolumeSnapshot", + request_serializer=gcb_volume_snapshot.RestoreVolumeSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_volume_snapshot"] + + @property + def delete_volume_snapshot( + self, + ) -> Callable[[volume_snapshot.DeleteVolumeSnapshotRequest], empty_pb2.Empty]: + r"""Return a callable for the delete volume snapshot method over gRPC. + + Deletes a volume snapshot. Returns INVALID_ARGUMENT if called + for a non-boot volume. + + Returns: + Callable[[~.DeleteVolumeSnapshotRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_volume_snapshot" not in self._stubs: + self._stubs["delete_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteVolumeSnapshot", + request_serializer=volume_snapshot.DeleteVolumeSnapshotRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_volume_snapshot"] + + @property + def get_volume_snapshot( + self, + ) -> Callable[ + [volume_snapshot.GetVolumeSnapshotRequest], volume_snapshot.VolumeSnapshot + ]: + r"""Return a callable for the get volume snapshot method over gRPC. + + Returns the specified snapshot resource. Returns + INVALID_ARGUMENT if called for a non-boot volume. + + Returns: + Callable[[~.GetVolumeSnapshotRequest], + ~.VolumeSnapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_volume_snapshot" not in self._stubs: + self._stubs["get_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/GetVolumeSnapshot", + request_serializer=volume_snapshot.GetVolumeSnapshotRequest.serialize, + response_deserializer=volume_snapshot.VolumeSnapshot.deserialize, + ) + return self._stubs["get_volume_snapshot"] + + @property + def list_volume_snapshots( + self, + ) -> Callable[ + [volume_snapshot.ListVolumeSnapshotsRequest], + volume_snapshot.ListVolumeSnapshotsResponse, + ]: + r"""Return a callable for the list volume snapshots method over gRPC. + + Retrieves the list of snapshots for the specified + volume. Returns a response with an empty list of + snapshots if called for a non-boot volume. + + Returns: + Callable[[~.ListVolumeSnapshotsRequest], + ~.ListVolumeSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_volume_snapshots" not in self._stubs: + self._stubs["list_volume_snapshots"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListVolumeSnapshots", + request_serializer=volume_snapshot.ListVolumeSnapshotsRequest.serialize, + response_deserializer=volume_snapshot.ListVolumeSnapshotsResponse.deserialize, + ) + return self._stubs["list_volume_snapshots"] + @property def get_lun(self) -> Callable[[lun.GetLunRequest], lun.Lun]: r"""Return a callable for the get lun method over gRPC. 
@@ -702,6 +1083,31 @@ def list_luns(self) -> Callable[[lun.ListLunsRequest], lun.ListLunsResponse]: ) return self._stubs["list_luns"] + @property + def evict_lun(self) -> Callable[[lun.EvictLunRequest], operations_pb2.Operation]: + r"""Return a callable for the evict lun method over gRPC. + + Skips lun's cooloff and deletes it now. + Lun must be in cooloff state. + + Returns: + Callable[[~.EvictLunRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "evict_lun" not in self._stubs: + self._stubs["evict_lun"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictLun", + request_serializer=lun.EvictLunRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["evict_lun"] + @property def get_nfs_share( self, @@ -780,6 +1186,286 @@ def update_nfs_share( ) return self._stubs["update_nfs_share"] + @property + def create_nfs_share( + self, + ) -> Callable[[gcb_nfs_share.CreateNfsShareRequest], operations_pb2.Operation]: + r"""Return a callable for the create nfs share method over gRPC. + + Create an NFS share. + + Returns: + Callable[[~.CreateNfsShareRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_nfs_share" not in self._stubs: + self._stubs["create_nfs_share"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateNfsShare", + request_serializer=gcb_nfs_share.CreateNfsShareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_nfs_share"] + + @property + def rename_nfs_share( + self, + ) -> Callable[[nfs_share.RenameNfsShareRequest], nfs_share.NfsShare]: + r"""Return a callable for the rename nfs share method over gRPC. + + RenameNfsShare sets a new name for an nfsshare. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameNfsShareRequest], + ~.NfsShare]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_nfs_share" not in self._stubs: + self._stubs["rename_nfs_share"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNfsShare", + request_serializer=nfs_share.RenameNfsShareRequest.serialize, + response_deserializer=nfs_share.NfsShare.deserialize, + ) + return self._stubs["rename_nfs_share"] + + @property + def delete_nfs_share( + self, + ) -> Callable[[nfs_share.DeleteNfsShareRequest], operations_pb2.Operation]: + r"""Return a callable for the delete nfs share method over gRPC. + + Delete an NFS share. The underlying volume is + automatically deleted. + + Returns: + Callable[[~.DeleteNfsShareRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_nfs_share" not in self._stubs: + self._stubs["delete_nfs_share"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteNfsShare", + request_serializer=nfs_share.DeleteNfsShareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_nfs_share"] + + @property + def list_provisioning_quotas( + self, + ) -> Callable[ + [provisioning.ListProvisioningQuotasRequest], + provisioning.ListProvisioningQuotasResponse, + ]: + r"""Return a callable for the list provisioning quotas method over gRPC. + + List the budget details to provision resources on a + given project. + + Returns: + Callable[[~.ListProvisioningQuotasRequest], + ~.ListProvisioningQuotasResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_provisioning_quotas" not in self._stubs: + self._stubs["list_provisioning_quotas"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListProvisioningQuotas", + request_serializer=provisioning.ListProvisioningQuotasRequest.serialize, + response_deserializer=provisioning.ListProvisioningQuotasResponse.deserialize, + ) + return self._stubs["list_provisioning_quotas"] + + @property + def submit_provisioning_config( + self, + ) -> Callable[ + [provisioning.SubmitProvisioningConfigRequest], + provisioning.SubmitProvisioningConfigResponse, + ]: + r"""Return a callable for the submit provisioning config method over gRPC. + + Submit a provisiong configuration for a given + project. + + Returns: + Callable[[~.SubmitProvisioningConfigRequest], + ~.SubmitProvisioningConfigResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_provisioning_config" not in self._stubs: + self._stubs["submit_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/SubmitProvisioningConfig", + request_serializer=provisioning.SubmitProvisioningConfigRequest.serialize, + response_deserializer=provisioning.SubmitProvisioningConfigResponse.deserialize, + ) + return self._stubs["submit_provisioning_config"] + + @property + def get_provisioning_config( + self, + ) -> Callable[ + [provisioning.GetProvisioningConfigRequest], provisioning.ProvisioningConfig + ]: + r"""Return a callable for the get provisioning config method over gRPC. + + Get ProvisioningConfig by name. + + Returns: + Callable[[~.GetProvisioningConfigRequest], + ~.ProvisioningConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_provisioning_config" not in self._stubs: + self._stubs["get_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/GetProvisioningConfig", + request_serializer=provisioning.GetProvisioningConfigRequest.serialize, + response_deserializer=provisioning.ProvisioningConfig.deserialize, + ) + return self._stubs["get_provisioning_config"] + + @property + def create_provisioning_config( + self, + ) -> Callable[ + [provisioning.CreateProvisioningConfigRequest], provisioning.ProvisioningConfig + ]: + r"""Return a callable for the create provisioning config method over gRPC. + + Create new ProvisioningConfig. 
+ + Returns: + Callable[[~.CreateProvisioningConfigRequest], + ~.ProvisioningConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_provisioning_config" not in self._stubs: + self._stubs["create_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateProvisioningConfig", + request_serializer=provisioning.CreateProvisioningConfigRequest.serialize, + response_deserializer=provisioning.ProvisioningConfig.deserialize, + ) + return self._stubs["create_provisioning_config"] + + @property + def update_provisioning_config( + self, + ) -> Callable[ + [provisioning.UpdateProvisioningConfigRequest], provisioning.ProvisioningConfig + ]: + r"""Return a callable for the update provisioning config method over gRPC. + + Update existing ProvisioningConfig. + + Returns: + Callable[[~.UpdateProvisioningConfigRequest], + ~.ProvisioningConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_provisioning_config" not in self._stubs: + self._stubs["update_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateProvisioningConfig", + request_serializer=provisioning.UpdateProvisioningConfigRequest.serialize, + response_deserializer=provisioning.ProvisioningConfig.deserialize, + ) + return self._stubs["update_provisioning_config"] + + @property + def rename_network( + self, + ) -> Callable[[network.RenameNetworkRequest], network.Network]: + r"""Return a callable for the rename network method over gRPC. + + RenameNetwork sets a new name for a network. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameNetworkRequest], + ~.Network]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_network" not in self._stubs: + self._stubs["rename_network"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNetwork", + request_serializer=network.RenameNetworkRequest.serialize, + response_deserializer=network.Network.deserialize, + ) + return self._stubs["rename_network"] + + @property + def list_os_images( + self, + ) -> Callable[[osimage.ListOSImagesRequest], osimage.ListOSImagesResponse]: + r"""Return a callable for the list os images method over gRPC. + + Retrieves the list of OS images which are currently + approved. + + Returns: + Callable[[~.ListOSImagesRequest], + ~.ListOSImagesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_os_images" not in self._stubs: + self._stubs["list_os_images"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListOSImages", + request_serializer=osimage.ListOSImagesRequest.serialize, + response_deserializer=osimage.ListOSImagesResponse.deserialize, + ) + return self._stubs["list_os_images"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py index 1744a6a8e77c..633321928468 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py @@ -23,18 +23,26 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from 
google.cloud.bare_metal_solution_v2.types import ssh_key +from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot from .base import DEFAULT_CLIENT_INFO, BareMetalSolutionTransport from .grpc import BareMetalSolutionGrpcTransport @@ -350,6 +358,34 @@ def update_instance( ) return self._stubs["update_instance"] + @property + def rename_instance( + self, + ) -> Callable[[instance.RenameInstanceRequest], Awaitable[instance.Instance]]: + r"""Return a callable for the rename instance method over gRPC. + + RenameInstance sets a new name for an instance. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_instance" not in self._stubs: + self._stubs["rename_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameInstance", + request_serializer=instance.RenameInstanceRequest.serialize, + response_deserializer=instance.Instance.deserialize, + ) + return self._stubs["rename_instance"] + @property def reset_instance( self, @@ -430,6 +466,72 @@ def stop_instance( ) return self._stubs["stop_instance"] + @property + def enable_interactive_serial_console( + self, + ) -> Callable[ + [instance.EnableInteractiveSerialConsoleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the enable interactive serial + console method over gRPC. + + Enable the interactive serial console feature on an + instance. 
+ + Returns: + Callable[[~.EnableInteractiveSerialConsoleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enable_interactive_serial_console" not in self._stubs: + self._stubs[ + "enable_interactive_serial_console" + ] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/EnableInteractiveSerialConsole", + request_serializer=instance.EnableInteractiveSerialConsoleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["enable_interactive_serial_console"] + + @property + def disable_interactive_serial_console( + self, + ) -> Callable[ + [instance.DisableInteractiveSerialConsoleRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the disable interactive serial + console method over gRPC. + + Disable the interactive serial console feature on an + instance. + + Returns: + Callable[[~.DisableInteractiveSerialConsoleRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "disable_interactive_serial_console" not in self._stubs: + self._stubs[ + "disable_interactive_serial_console" + ] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DisableInteractiveSerialConsole", + request_serializer=instance.DisableInteractiveSerialConsoleRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["disable_interactive_serial_console"] + @property def detach_lun( self, @@ -456,6 +558,88 @@ def detach_lun( ) return self._stubs["detach_lun"] + @property + def list_ssh_keys( + self, + ) -> Callable[[ssh_key.ListSSHKeysRequest], Awaitable[ssh_key.ListSSHKeysResponse]]: + r"""Return a callable for the list ssh keys method over gRPC. + + Lists the public SSH keys registered for the + specified project. These SSH keys are used only for the + interactive serial console feature. + + Returns: + Callable[[~.ListSSHKeysRequest], + Awaitable[~.ListSSHKeysResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_ssh_keys" not in self._stubs: + self._stubs["list_ssh_keys"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListSSHKeys", + request_serializer=ssh_key.ListSSHKeysRequest.serialize, + response_deserializer=ssh_key.ListSSHKeysResponse.deserialize, + ) + return self._stubs["list_ssh_keys"] + + @property + def create_ssh_key( + self, + ) -> Callable[[gcb_ssh_key.CreateSSHKeyRequest], Awaitable[gcb_ssh_key.SSHKey]]: + r"""Return a callable for the create ssh key method over gRPC. + + Register a public SSH key in the specified project + for use with the interactive serial console feature. 
+ + Returns: + Callable[[~.CreateSSHKeyRequest], + Awaitable[~.SSHKey]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_ssh_key" not in self._stubs: + self._stubs["create_ssh_key"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateSSHKey", + request_serializer=gcb_ssh_key.CreateSSHKeyRequest.serialize, + response_deserializer=gcb_ssh_key.SSHKey.deserialize, + ) + return self._stubs["create_ssh_key"] + + @property + def delete_ssh_key( + self, + ) -> Callable[[ssh_key.DeleteSSHKeyRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete ssh key method over gRPC. + + Deletes a public SSH key registered in the specified + project. + + Returns: + Callable[[~.DeleteSSHKeyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_ssh_key" not in self._stubs: + self._stubs["delete_ssh_key"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteSSHKey", + request_serializer=ssh_key.DeleteSSHKeyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_ssh_key"] + @property def list_volumes( self, @@ -536,6 +720,61 @@ def update_volume( ) return self._stubs["update_volume"] + @property + def rename_volume( + self, + ) -> Callable[[volume.RenameVolumeRequest], Awaitable[volume.Volume]]: + r"""Return a callable for the rename volume method over gRPC. + + RenameVolume sets a new name for a volume. 
+ Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameVolumeRequest], + Awaitable[~.Volume]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_volume" not in self._stubs: + self._stubs["rename_volume"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameVolume", + request_serializer=volume.RenameVolumeRequest.serialize, + response_deserializer=volume.Volume.deserialize, + ) + return self._stubs["rename_volume"] + + @property + def evict_volume( + self, + ) -> Callable[[volume.EvictVolumeRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the evict volume method over gRPC. + + Skips volume's cooloff and deletes it now. + Volume must be in cooloff state. + + Returns: + Callable[[~.EvictVolumeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "evict_volume" not in self._stubs: + self._stubs["evict_volume"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictVolume", + request_serializer=volume.EvictVolumeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["evict_volume"] + @property def resize_volume( self, @@ -676,6 +915,156 @@ def update_network( ) return self._stubs["update_network"] + @property + def create_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.CreateVolumeSnapshotRequest], + Awaitable[gcb_volume_snapshot.VolumeSnapshot], + ]: + r"""Return a callable for the create volume snapshot method over gRPC. + + Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if + called for a non-boot volume. + + Returns: + Callable[[~.CreateVolumeSnapshotRequest], + Awaitable[~.VolumeSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_volume_snapshot" not in self._stubs: + self._stubs["create_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateVolumeSnapshot", + request_serializer=gcb_volume_snapshot.CreateVolumeSnapshotRequest.serialize, + response_deserializer=gcb_volume_snapshot.VolumeSnapshot.deserialize, + ) + return self._stubs["create_volume_snapshot"] + + @property + def restore_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the restore volume snapshot method over gRPC. + + Uses the specified snapshot to restore its parent volume. + Returns INVALID_ARGUMENT if called for a non-boot volume. 
+ + Returns: + Callable[[~.RestoreVolumeSnapshotRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_volume_snapshot" not in self._stubs: + self._stubs["restore_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RestoreVolumeSnapshot", + request_serializer=gcb_volume_snapshot.RestoreVolumeSnapshotRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_volume_snapshot"] + + @property + def delete_volume_snapshot( + self, + ) -> Callable[ + [volume_snapshot.DeleteVolumeSnapshotRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete volume snapshot method over gRPC. + + Deletes a volume snapshot. Returns INVALID_ARGUMENT if called + for a non-boot volume. + + Returns: + Callable[[~.DeleteVolumeSnapshotRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_volume_snapshot" not in self._stubs: + self._stubs["delete_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteVolumeSnapshot", + request_serializer=volume_snapshot.DeleteVolumeSnapshotRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_volume_snapshot"] + + @property + def get_volume_snapshot( + self, + ) -> Callable[ + [volume_snapshot.GetVolumeSnapshotRequest], + Awaitable[volume_snapshot.VolumeSnapshot], + ]: + r"""Return a callable for the get volume snapshot method over gRPC. + + Returns the specified snapshot resource. Returns + INVALID_ARGUMENT if called for a non-boot volume. + + Returns: + Callable[[~.GetVolumeSnapshotRequest], + Awaitable[~.VolumeSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_volume_snapshot" not in self._stubs: + self._stubs["get_volume_snapshot"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/GetVolumeSnapshot", + request_serializer=volume_snapshot.GetVolumeSnapshotRequest.serialize, + response_deserializer=volume_snapshot.VolumeSnapshot.deserialize, + ) + return self._stubs["get_volume_snapshot"] + + @property + def list_volume_snapshots( + self, + ) -> Callable[ + [volume_snapshot.ListVolumeSnapshotsRequest], + Awaitable[volume_snapshot.ListVolumeSnapshotsResponse], + ]: + r"""Return a callable for the list volume snapshots method over gRPC. + + Retrieves the list of snapshots for the specified + volume. Returns a response with an empty list of + snapshots if called for a non-boot volume. 
+ + Returns: + Callable[[~.ListVolumeSnapshotsRequest], + Awaitable[~.ListVolumeSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_volume_snapshots" not in self._stubs: + self._stubs["list_volume_snapshots"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListVolumeSnapshots", + request_serializer=volume_snapshot.ListVolumeSnapshotsRequest.serialize, + response_deserializer=volume_snapshot.ListVolumeSnapshotsResponse.deserialize, + ) + return self._stubs["list_volume_snapshots"] + @property def get_lun(self) -> Callable[[lun.GetLunRequest], Awaitable[lun.Lun]]: r"""Return a callable for the get lun method over gRPC. @@ -727,6 +1116,33 @@ def list_luns( ) return self._stubs["list_luns"] + @property + def evict_lun( + self, + ) -> Callable[[lun.EvictLunRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the evict lun method over gRPC. + + Skips lun's cooloff and deletes it now. + Lun must be in cooloff state. + + Returns: + Callable[[~.EvictLunRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "evict_lun" not in self._stubs: + self._stubs["evict_lun"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictLun", + request_serializer=lun.EvictLunRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["evict_lun"] + @property def get_nfs_share( self, @@ -809,6 +1225,295 @@ def update_nfs_share( ) return self._stubs["update_nfs_share"] + @property + def create_nfs_share( + self, + ) -> Callable[ + [gcb_nfs_share.CreateNfsShareRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create nfs share method over gRPC. + + Create an NFS share. + + Returns: + Callable[[~.CreateNfsShareRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_nfs_share" not in self._stubs: + self._stubs["create_nfs_share"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateNfsShare", + request_serializer=gcb_nfs_share.CreateNfsShareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_nfs_share"] + + @property + def rename_nfs_share( + self, + ) -> Callable[[nfs_share.RenameNfsShareRequest], Awaitable[nfs_share.NfsShare]]: + r"""Return a callable for the rename nfs share method over gRPC. + + RenameNfsShare sets a new name for an nfsshare. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameNfsShareRequest], + Awaitable[~.NfsShare]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_nfs_share" not in self._stubs: + self._stubs["rename_nfs_share"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNfsShare", + request_serializer=nfs_share.RenameNfsShareRequest.serialize, + response_deserializer=nfs_share.NfsShare.deserialize, + ) + return self._stubs["rename_nfs_share"] + + @property + def delete_nfs_share( + self, + ) -> Callable[ + [nfs_share.DeleteNfsShareRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete nfs share method over gRPC. + + Delete an NFS share. The underlying volume is + automatically deleted. + + Returns: + Callable[[~.DeleteNfsShareRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_nfs_share" not in self._stubs: + self._stubs["delete_nfs_share"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteNfsShare", + request_serializer=nfs_share.DeleteNfsShareRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_nfs_share"] + + @property + def list_provisioning_quotas( + self, + ) -> Callable[ + [provisioning.ListProvisioningQuotasRequest], + Awaitable[provisioning.ListProvisioningQuotasResponse], + ]: + r"""Return a callable for the list provisioning quotas method over gRPC. + + List the budget details to provision resources on a + given project. + + Returns: + Callable[[~.ListProvisioningQuotasRequest], + Awaitable[~.ListProvisioningQuotasResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_provisioning_quotas" not in self._stubs: + self._stubs["list_provisioning_quotas"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListProvisioningQuotas", + request_serializer=provisioning.ListProvisioningQuotasRequest.serialize, + response_deserializer=provisioning.ListProvisioningQuotasResponse.deserialize, + ) + return self._stubs["list_provisioning_quotas"] + + @property + def submit_provisioning_config( + self, + ) -> Callable[ + [provisioning.SubmitProvisioningConfigRequest], + Awaitable[provisioning.SubmitProvisioningConfigResponse], + ]: + r"""Return a callable for the submit provisioning config method over gRPC. + + Submit a provisiong configuration for a given + project. + + Returns: + Callable[[~.SubmitProvisioningConfigRequest], + Awaitable[~.SubmitProvisioningConfigResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_provisioning_config" not in self._stubs: + self._stubs["submit_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/SubmitProvisioningConfig", + request_serializer=provisioning.SubmitProvisioningConfigRequest.serialize, + response_deserializer=provisioning.SubmitProvisioningConfigResponse.deserialize, + ) + return self._stubs["submit_provisioning_config"] + + @property + def get_provisioning_config( + self, + ) -> Callable[ + [provisioning.GetProvisioningConfigRequest], + Awaitable[provisioning.ProvisioningConfig], + ]: + r"""Return a callable for the get provisioning config method over gRPC. 
+ + Get ProvisioningConfig by name. + + Returns: + Callable[[~.GetProvisioningConfigRequest], + Awaitable[~.ProvisioningConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_provisioning_config" not in self._stubs: + self._stubs["get_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/GetProvisioningConfig", + request_serializer=provisioning.GetProvisioningConfigRequest.serialize, + response_deserializer=provisioning.ProvisioningConfig.deserialize, + ) + return self._stubs["get_provisioning_config"] + + @property + def create_provisioning_config( + self, + ) -> Callable[ + [provisioning.CreateProvisioningConfigRequest], + Awaitable[provisioning.ProvisioningConfig], + ]: + r"""Return a callable for the create provisioning config method over gRPC. + + Create new ProvisioningConfig. + + Returns: + Callable[[~.CreateProvisioningConfigRequest], + Awaitable[~.ProvisioningConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_provisioning_config" not in self._stubs: + self._stubs["create_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateProvisioningConfig", + request_serializer=provisioning.CreateProvisioningConfigRequest.serialize, + response_deserializer=provisioning.ProvisioningConfig.deserialize, + ) + return self._stubs["create_provisioning_config"] + + @property + def update_provisioning_config( + self, + ) -> Callable[ + [provisioning.UpdateProvisioningConfigRequest], + Awaitable[provisioning.ProvisioningConfig], + ]: + r"""Return a callable for the update provisioning config method over gRPC. + + Update existing ProvisioningConfig. + + Returns: + Callable[[~.UpdateProvisioningConfigRequest], + Awaitable[~.ProvisioningConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_provisioning_config" not in self._stubs: + self._stubs["update_provisioning_config"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateProvisioningConfig", + request_serializer=provisioning.UpdateProvisioningConfigRequest.serialize, + response_deserializer=provisioning.ProvisioningConfig.deserialize, + ) + return self._stubs["update_provisioning_config"] + + @property + def rename_network( + self, + ) -> Callable[[network.RenameNetworkRequest], Awaitable[network.Network]]: + r"""Return a callable for the rename network method over gRPC. + + RenameNetwork sets a new name for a network. + Use with caution, previous names become immediately + invalidated. + + Returns: + Callable[[~.RenameNetworkRequest], + Awaitable[~.Network]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rename_network" not in self._stubs: + self._stubs["rename_network"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNetwork", + request_serializer=network.RenameNetworkRequest.serialize, + response_deserializer=network.Network.deserialize, + ) + return self._stubs["rename_network"] + + @property + def list_os_images( + self, + ) -> Callable[ + [osimage.ListOSImagesRequest], Awaitable[osimage.ListOSImagesResponse] + ]: + r"""Return a callable for the list os images method over gRPC. + + Retrieves the list of OS images which are currently + approved. + + Returns: + Callable[[~.ListOSImagesRequest], + Awaitable[~.ListOSImagesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_os_images" not in self._stubs: + self._stubs["list_os_images"] = self.grpc_channel.unary_unary( + "/google.cloud.baremetalsolution.v2.BareMetalSolution/ListOSImages", + request_serializer=osimage.ListOSImagesRequest.serialize, + response_deserializer=osimage.ListOSImagesResponse.deserialize, + ) + return self._stubs["list_os_images"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py index bc064924f22c..77db03906f17 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py @@ -47,16 +47,24 @@ from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from google.cloud.bare_metal_solution_v2.types import ssh_key +from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from 
google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot from .base import BareMetalSolutionTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -83,6 +91,54 @@ class BareMetalSolutionRestInterceptor: .. code-block:: python class MyCustomBareMetalSolutionInterceptor(BareMetalSolutionRestInterceptor): + def pre_create_nfs_share(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_nfs_share(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_provisioning_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_provisioning_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_ssh_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_ssh_key(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_volume_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_volume_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_nfs_share(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_nfs_share(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_ssh_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_volume_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_detach_lun(self, request, metadata): logging.log(f"Received request: {request}") 
return request, metadata @@ -91,6 +147,38 @@ def post_detach_lun(self, response): logging.log(f"Received response: {response}") return response + def pre_disable_interactive_serial_console(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_disable_interactive_serial_console(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_enable_interactive_serial_console(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_enable_interactive_serial_console(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_evict_lun(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_evict_lun(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_evict_volume(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_evict_volume(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -123,6 +211,14 @@ def post_get_nfs_share(self, response): logging.log(f"Received response: {response}") return response + def pre_get_provisioning_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_provisioning_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_volume(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -131,6 +227,14 @@ def post_get_volume(self, response): logging.log(f"Received response: {response}") return response + def pre_get_volume_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_get_volume_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_instances(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -171,6 +275,30 @@ def post_list_nfs_shares(self, response): logging.log(f"Received response: {response}") return response + def pre_list_os_images(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_images(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_provisioning_quotas(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_provisioning_quotas(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_ssh_keys(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_ssh_keys(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_volumes(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -179,6 +307,46 @@ def post_list_volumes(self, response): logging.log(f"Received response: {response}") return response + def pre_list_volume_snapshots(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_volume_snapshots(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rename_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rename_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rename_network(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rename_network(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_rename_nfs_share(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rename_nfs_share(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rename_volume(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rename_volume(self, response): + logging.log(f"Received response: {response}") + return response + def pre_reset_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -195,6 +363,14 @@ def post_resize_volume(self, response): logging.log(f"Received response: {response}") return response + def pre_restore_volume_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_volume_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + def pre_start_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -211,6 +387,14 @@ def post_stop_instance(self, response): logging.log(f"Received response: {response}") return response + def pre_submit_provisioning_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_provisioning_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -235,6 +419,14 @@ def post_update_nfs_share(self, response): logging.log(f"Received response: {response}") return response + def pre_update_provisioning_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_provisioning_config(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_update_volume(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -249,22 +441,22 @@ def post_update_volume(self, response): """ - def pre_detach_lun( + def pre_create_nfs_share( self, - request: gcb_instance.DetachLunRequest, + request: gcb_nfs_share.CreateNfsShareRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gcb_instance.DetachLunRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for detach_lun + ) -> Tuple[gcb_nfs_share.CreateNfsShareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_nfs_share Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. """ return request, metadata - def post_detach_lun( + def post_create_nfs_share( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for detach_lun + """Post-rpc interceptor for create_nfs_share Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -272,18 +464,22 @@ def post_detach_lun( """ return response - def pre_get_instance( - self, request: instance.GetInstanceRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[instance.GetInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_instance + def pre_create_provisioning_config( + self, + request: provisioning.CreateProvisioningConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[provisioning.CreateProvisioningConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_provisioning_config Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_get_instance(self, response: instance.Instance) -> instance.Instance: - """Post-rpc interceptor for get_instance + def post_create_provisioning_config( + self, response: provisioning.ProvisioningConfig + ) -> provisioning.ProvisioningConfig: + """Post-rpc interceptor for create_provisioning_config Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -291,18 +487,20 @@ def post_get_instance(self, response: instance.Instance) -> instance.Instance: """ return response - def pre_get_lun( - self, request: lun.GetLunRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[lun.GetLunRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_lun + def pre_create_ssh_key( + self, + request: gcb_ssh_key.CreateSSHKeyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcb_ssh_key.CreateSSHKeyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_ssh_key Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_get_lun(self, response: lun.Lun) -> lun.Lun: - """Post-rpc interceptor for get_lun + def post_create_ssh_key(self, response: gcb_ssh_key.SSHKey) -> gcb_ssh_key.SSHKey: + """Post-rpc interceptor for create_ssh_key Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -310,18 +508,24 @@ def post_get_lun(self, response: lun.Lun) -> lun.Lun: """ return response - def pre_get_network( - self, request: network.GetNetworkRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[network.GetNetworkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_network + def pre_create_volume_snapshot( + self, + request: gcb_volume_snapshot.CreateVolumeSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcb_volume_snapshot.CreateVolumeSnapshotRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_volume_snapshot Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_get_network(self, response: network.Network) -> network.Network: - """Post-rpc interceptor for get_network + def post_create_volume_snapshot( + self, response: gcb_volume_snapshot.VolumeSnapshot + ) -> gcb_volume_snapshot.VolumeSnapshot: + """Post-rpc interceptor for create_volume_snapshot Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -329,18 +533,22 @@ def post_get_network(self, response: network.Network) -> network.Network: """ return response - def pre_get_nfs_share( - self, request: nfs_share.GetNfsShareRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[nfs_share.GetNfsShareRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_nfs_share + def pre_delete_nfs_share( + self, + request: nfs_share.DeleteNfsShareRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[nfs_share.DeleteNfsShareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_nfs_share Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_get_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: - """Post-rpc interceptor for get_nfs_share + def post_delete_nfs_share( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_nfs_share Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -348,41 +556,44 @@ def post_get_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare """ return response - def pre_get_volume( - self, request: volume.GetVolumeRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[volume.GetVolumeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_volume + def pre_delete_ssh_key( + self, request: ssh_key.DeleteSSHKeyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[ssh_key.DeleteSSHKeyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_ssh_key Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. """ return request, metadata - def post_get_volume(self, response: volume.Volume) -> volume.Volume: - """Post-rpc interceptor for get_volume + def pre_delete_volume_snapshot( + self, + request: volume_snapshot.DeleteVolumeSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[volume_snapshot.DeleteVolumeSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_volume_snapshot - Override in a subclass to manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. 
""" - return response + return request, metadata - def pre_list_instances( + def pre_detach_lun( self, - request: instance.ListInstancesRequest, + request: gcb_instance.DetachLunRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[instance.ListInstancesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_instances + ) -> Tuple[gcb_instance.DetachLunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for detach_lun Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. """ return request, metadata - def post_list_instances( - self, response: instance.ListInstancesResponse - ) -> instance.ListInstancesResponse: - """Post-rpc interceptor for list_instances + def post_detach_lun( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for detach_lun Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -390,18 +601,24 @@ def post_list_instances( """ return response - def pre_list_luns( - self, request: lun.ListLunsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[lun.ListLunsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_luns + def pre_disable_interactive_serial_console( + self, + request: instance.DisableInteractiveSerialConsoleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + instance.DisableInteractiveSerialConsoleRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for disable_interactive_serial_console Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_list_luns(self, response: lun.ListLunsResponse) -> lun.ListLunsResponse: - """Post-rpc interceptor for list_luns + def post_disable_interactive_serial_console( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for disable_interactive_serial_console Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -409,20 +626,24 @@ def post_list_luns(self, response: lun.ListLunsResponse) -> lun.ListLunsResponse """ return response - def pre_list_networks( - self, request: network.ListNetworksRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[network.ListNetworksRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_networks + def pre_enable_interactive_serial_console( + self, + request: instance.EnableInteractiveSerialConsoleRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + instance.EnableInteractiveSerialConsoleRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for enable_interactive_serial_console Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_list_networks( - self, response: network.ListNetworksResponse - ) -> network.ListNetworksResponse: - """Post-rpc interceptor for list_networks + def post_enable_interactive_serial_console( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for enable_interactive_serial_console Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -430,22 +651,20 @@ def post_list_networks( """ return response - def pre_list_network_usage( - self, - request: network.ListNetworkUsageRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[network.ListNetworkUsageRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_network_usage + def pre_evict_lun( + self, request: lun.EvictLunRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[lun.EvictLunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for evict_lun Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_list_network_usage( - self, response: network.ListNetworkUsageResponse - ) -> network.ListNetworkUsageResponse: - """Post-rpc interceptor for list_network_usage + def post_evict_lun( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for evict_lun Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -453,22 +672,20 @@ def post_list_network_usage( """ return response - def pre_list_nfs_shares( - self, - request: nfs_share.ListNfsSharesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[nfs_share.ListNfsSharesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_nfs_shares + def pre_evict_volume( + self, request: volume.EvictVolumeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[volume.EvictVolumeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for evict_volume Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_list_nfs_shares( - self, response: nfs_share.ListNfsSharesResponse - ) -> nfs_share.ListNfsSharesResponse: - """Post-rpc interceptor for list_nfs_shares + def post_evict_volume( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for evict_volume Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -476,20 +693,18 @@ def post_list_nfs_shares( """ return response - def pre_list_volumes( - self, request: volume.ListVolumesRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[volume.ListVolumesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_volumes + def pre_get_instance( + self, request: instance.GetInstanceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[instance.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_list_volumes( - self, response: volume.ListVolumesResponse - ) -> volume.ListVolumesResponse: - """Post-rpc interceptor for list_volumes + def post_get_instance(self, response: instance.Instance) -> instance.Instance: + """Post-rpc interceptor for get_instance Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -497,22 +712,18 @@ def post_list_volumes( """ return response - def pre_reset_instance( - self, - request: instance.ResetInstanceRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[instance.ResetInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for reset_instance + def pre_get_lun( + self, request: lun.GetLunRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[lun.GetLunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_lun Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_reset_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for reset_instance + def post_get_lun(self, response: lun.Lun) -> lun.Lun: + """Post-rpc interceptor for get_lun Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -520,22 +731,18 @@ def post_reset_instance( """ return response - def pre_resize_volume( - self, - request: gcb_volume.ResizeVolumeRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gcb_volume.ResizeVolumeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for resize_volume + def pre_get_network( + self, request: network.GetNetworkRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[network.GetNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_network Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. """ return request, metadata - def post_resize_volume( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for resize_volume + def post_get_network(self, response: network.Network) -> network.Network: + """Post-rpc interceptor for get_network Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -543,22 +750,41 @@ def post_resize_volume( """ return response - def pre_start_instance( + def pre_get_nfs_share( + self, request: nfs_share.GetNfsShareRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[nfs_share.GetNfsShareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_nfs_share + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. 
+ """ + return request, metadata + + def post_get_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: + """Post-rpc interceptor for get_nfs_share + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_get_provisioning_config( self, - request: instance.StartInstanceRequest, + request: provisioning.GetProvisioningConfigRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[instance.StartInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for start_instance + ) -> Tuple[provisioning.GetProvisioningConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_provisioning_config Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. """ return request, metadata - def post_start_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for start_instance + def post_get_provisioning_config( + self, response: provisioning.ProvisioningConfig + ) -> provisioning.ProvisioningConfig: + """Post-rpc interceptor for get_provisioning_config Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -566,20 +792,18 @@ def post_start_instance( """ return response - def pre_stop_instance( - self, request: instance.StopInstanceRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[instance.StopInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for stop_instance + def pre_get_volume( + self, request: volume.GetVolumeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[volume.GetVolumeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_volume Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_stop_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for stop_instance + def post_get_volume(self, response: volume.Volume) -> volume.Volume: + """Post-rpc interceptor for get_volume Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -587,22 +811,22 @@ def post_stop_instance( """ return response - def pre_update_instance( + def pre_get_volume_snapshot( self, - request: gcb_instance.UpdateInstanceRequest, + request: volume_snapshot.GetVolumeSnapshotRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gcb_instance.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_instance + ) -> Tuple[volume_snapshot.GetVolumeSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_volume_snapshot Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_update_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance + def post_get_volume_snapshot( + self, response: volume_snapshot.VolumeSnapshot + ) -> volume_snapshot.VolumeSnapshot: + """Post-rpc interceptor for get_volume_snapshot Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -610,22 +834,22 @@ def post_update_instance( """ return response - def pre_update_network( + def pre_list_instances( self, - request: gcb_network.UpdateNetworkRequest, + request: instance.ListInstancesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gcb_network.UpdateNetworkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_network + ) -> Tuple[instance.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_update_network( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_network + def post_list_instances( + self, response: instance.ListInstancesResponse + ) -> instance.ListInstancesResponse: + """Post-rpc interceptor for list_instances Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -633,22 +857,18 @@ def post_update_network( """ return response - def pre_update_nfs_share( - self, - request: gcb_nfs_share.UpdateNfsShareRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gcb_nfs_share.UpdateNfsShareRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_nfs_share + def pre_list_luns( + self, request: lun.ListLunsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[lun.ListLunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_luns Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_update_nfs_share( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_nfs_share + def post_list_luns(self, response: lun.ListLunsResponse) -> lun.ListLunsResponse: + """Post-rpc interceptor for list_luns Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -656,22 +876,20 @@ def post_update_nfs_share( """ return response - def pre_update_volume( - self, - request: gcb_volume.UpdateVolumeRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gcb_volume.UpdateVolumeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_volume + def pre_list_networks( + self, request: network.ListNetworksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[network.ListNetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_networks Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_update_volume( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_volume + def post_list_networks( + self, response: network.ListNetworksResponse + ) -> network.ListNetworksResponse: + """Post-rpc interceptor for list_networks Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -679,22 +897,22 @@ def post_update_volume( """ return response - def pre_get_location( + def pre_list_network_usage( self, - request: locations_pb2.GetLocationRequest, + request: network.ListNetworkUsageRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location + ) -> Tuple[network.ListNetworkUsageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_network_usage Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_list_network_usage( + self, response: network.ListNetworkUsageResponse + ) -> network.ListNetworkUsageResponse: + """Post-rpc interceptor for list_network_usage Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -702,22 +920,22 @@ def post_get_location( """ return response - def pre_list_locations( + def pre_list_nfs_shares( self, - request: locations_pb2.ListLocationsRequest, + request: nfs_share.ListNfsSharesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations + ) -> Tuple[nfs_share.ListNfsSharesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_nfs_shares Override in a subclass to manipulate the request or metadata before they are sent to the BareMetalSolution server. 
""" return request, metadata - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + def post_list_nfs_shares( + self, response: nfs_share.ListNfsSharesResponse + ) -> nfs_share.ListNfsSharesResponse: + """Post-rpc interceptor for list_nfs_shares Override in a subclass to manipulate the response after it is returned by the BareMetalSolution server but before @@ -725,142 +943,2732 @@ def post_list_locations( """ return response + def pre_list_os_images( + self, request: osimage.ListOSImagesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[osimage.ListOSImagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_os_images -@dataclasses.dataclass -class BareMetalSolutionRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BareMetalSolutionRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + def post_list_os_images( + self, response: osimage.ListOSImagesResponse + ) -> osimage.ListOSImagesResponse: + """Post-rpc interceptor for list_os_images -class BareMetalSolutionRestTransport(BareMetalSolutionTransport): - """REST backend transport for BareMetalSolution. + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response - Performs management operations on Bare Metal Solution servers. + def pre_list_provisioning_quotas( + self, + request: provisioning.ListProvisioningQuotasRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[provisioning.ListProvisioningQuotasRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_provisioning_quotas - The ``baremetalsolution.googleapis.com`` service provides management - capabilities for Bare Metal Solution servers. 
To access the API - methods, you must assign Bare Metal Solution IAM roles containing - the desired permissions to your staff in your Google Cloud project. - You must also enable the Bare Metal Solution API. Once enabled, the - methods act upon specific servers in your Bare Metal Solution - environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + def post_list_provisioning_quotas( + self, response: provisioning.ListProvisioningQuotasResponse + ) -> provisioning.ListProvisioningQuotasResponse: + """Post-rpc interceptor for list_provisioning_quotas - It sends JSON representations of protocol buffers over HTTP/1.1 + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response - """ + def pre_list_ssh_keys( + self, request: ssh_key.ListSSHKeysRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[ssh_key.ListSSHKeysRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_ssh_keys - def __init__( - self, - *, - host: str = "baremetalsolution.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[BareMetalSolutionRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
+ Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + def post_list_ssh_keys( + self, response: ssh_key.ListSSHKeysResponse + ) -> ssh_key.ListSSHKeysResponse: + """Post-rpc interceptor for list_ssh_keys - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response + + def pre_list_volumes( + self, request: volume.ListVolumesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[volume.ListVolumesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_volumes + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_list_volumes( + self, response: volume.ListVolumesResponse + ) -> volume.ListVolumesResponse: + """Post-rpc interceptor for list_volumes + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_list_volume_snapshots( + self, + request: volume_snapshot.ListVolumeSnapshotsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[volume_snapshot.ListVolumeSnapshotsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_volume_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_list_volume_snapshots( + self, response: volume_snapshot.ListVolumeSnapshotsResponse + ) -> volume_snapshot.ListVolumeSnapshotsResponse: + """Post-rpc interceptor for list_volume_snapshots + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. 
+ """ + return response + + def pre_rename_instance( + self, + request: instance.RenameInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[instance.RenameInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rename_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_rename_instance(self, response: instance.Instance) -> instance.Instance: + """Post-rpc interceptor for rename_instance + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_rename_network( + self, request: network.RenameNetworkRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[network.RenameNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rename_network + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_rename_network(self, response: network.Network) -> network.Network: + """Post-rpc interceptor for rename_network + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_rename_nfs_share( + self, + request: nfs_share.RenameNfsShareRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[nfs_share.RenameNfsShareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rename_nfs_share + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. 
+ """ + return request, metadata + + def post_rename_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: + """Post-rpc interceptor for rename_nfs_share + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_rename_volume( + self, request: volume.RenameVolumeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[volume.RenameVolumeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rename_volume + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_rename_volume(self, response: volume.Volume) -> volume.Volume: + """Post-rpc interceptor for rename_volume + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_reset_instance( + self, + request: instance.ResetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[instance.ResetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reset_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_reset_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for reset_instance + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. 
+ """ + return response + + def pre_resize_volume( + self, + request: gcb_volume.ResizeVolumeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcb_volume.ResizeVolumeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resize_volume + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_resize_volume( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for resize_volume + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_restore_volume_snapshot( + self, + request: gcb_volume_snapshot.RestoreVolumeSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcb_volume_snapshot.RestoreVolumeSnapshotRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for restore_volume_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_restore_volume_snapshot( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_volume_snapshot + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_start_instance( + self, + request: instance.StartInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[instance.StartInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. 
+ """ + return request, metadata + + def post_start_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for start_instance + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_stop_instance( + self, request: instance.StopInstanceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[instance.StopInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_stop_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for stop_instance + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_submit_provisioning_config( + self, + request: provisioning.SubmitProvisioningConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[provisioning.SubmitProvisioningConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_provisioning_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_submit_provisioning_config( + self, response: provisioning.SubmitProvisioningConfigResponse + ) -> provisioning.SubmitProvisioningConfigResponse: + """Post-rpc interceptor for submit_provisioning_config + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. 
+ """ + return response + + def pre_update_instance( + self, + request: gcb_instance.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcb_instance.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_update_network( + self, + request: gcb_network.UpdateNetworkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcb_network.UpdateNetworkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_network + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_update_network( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_network + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_update_nfs_share( + self, + request: gcb_nfs_share.UpdateNfsShareRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcb_nfs_share.UpdateNfsShareRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_nfs_share + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. 
+ """ + return request, metadata + + def post_update_nfs_share( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_nfs_share + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_update_provisioning_config( + self, + request: provisioning.UpdateProvisioningConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[provisioning.UpdateProvisioningConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_provisioning_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_update_provisioning_config( + self, response: provisioning.ProvisioningConfig + ) -> provisioning.ProvisioningConfig: + """Post-rpc interceptor for update_provisioning_config + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_update_volume( + self, + request: gcb_volume.UpdateVolumeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcb_volume.UpdateVolumeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_volume + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_update_volume( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_volume + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BareMetalSolution server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BareMetalSolution server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BareMetalSolutionRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BareMetalSolutionRestInterceptor + + +class BareMetalSolutionRestTransport(BareMetalSolutionTransport): + """REST backend transport for BareMetalSolution. + + Performs management operations on Bare Metal Solution servers. + + The ``baremetalsolution.googleapis.com`` service provides management + capabilities for Bare Metal Solution servers. 
To access the API + methods, you must assign Bare Metal Solution IAM roles containing + the desired permissions to your staff in your Google Cloud project. + You must also enable the Bare Metal Solution API. Once enabled, the + methods act upon specific servers in your Bare Metal Solution + environment. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "baremetalsolution.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BareMetalSolutionRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BareMetalSolutionRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = {} + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v2", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateNfsShare(BareMetalSolutionRestStub): + def __hash__(self): + return hash("CreateNfsShare") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcb_nfs_share.CreateNfsShareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create nfs share method over HTTP. + + Args: + request (~.gcb_nfs_share.CreateNfsShareRequest): + The request object. Message for creating an NFS share. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/nfsShares", + "body": "nfs_share", + }, + ] + request, metadata = self._interceptor.pre_create_nfs_share( + request, metadata + ) + pb_request = gcb_nfs_share.CreateNfsShareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_nfs_share(resp) + return resp + + class _CreateProvisioningConfig(BareMetalSolutionRestStub): + def __hash__(self): + return hash("CreateProvisioningConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: provisioning.CreateProvisioningConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> provisioning.ProvisioningConfig: + r"""Call the create provisioning + config method over HTTP. + + Args: + request (~.provisioning.CreateProvisioningConfigRequest): + The request object. Request for CreateProvisioningConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.provisioning.ProvisioningConfig: + A provisioning configuration. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/provisioningConfigs", + "body": "provisioning_config", + }, + ] + request, metadata = self._interceptor.pre_create_provisioning_config( + request, metadata + ) + pb_request = provisioning.CreateProvisioningConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = provisioning.ProvisioningConfig() + pb_resp = provisioning.ProvisioningConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_provisioning_config(resp) + return resp + + class _CreateSSHKey(BareMetalSolutionRestStub): + def __hash__(self): + return hash("CreateSSHKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "sshKeyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcb_ssh_key.CreateSSHKeyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcb_ssh_key.SSHKey: + r"""Call the create ssh key method over HTTP. + + Args: + request (~.gcb_ssh_key.CreateSSHKeyRequest): + The request object. Message for registering a public SSH + key in a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcb_ssh_key.SSHKey: + An SSH key, used for authorizing with + the interactive serial console feature. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/sshKeys", + "body": "ssh_key", + }, + ] + request, metadata = self._interceptor.pre_create_ssh_key(request, metadata) + pb_request = gcb_ssh_key.CreateSSHKeyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcb_ssh_key.SSHKey() + pb_resp = gcb_ssh_key.SSHKey.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_ssh_key(resp) + return resp + + class _CreateVolumeSnapshot(BareMetalSolutionRestStub): + def __hash__(self): + return hash("CreateVolumeSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcb_volume_snapshot.CreateVolumeSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcb_volume_snapshot.VolumeSnapshot: + r"""Call the create volume snapshot method over HTTP. + + Args: + request (~.gcb_volume_snapshot.CreateVolumeSnapshotRequest): + The request object. Message for creating a volume + snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcb_volume_snapshot.VolumeSnapshot: + A snapshot of a volume. Only boot + volumes can have snapshots. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*/volumes/*}/snapshots", + "body": "volume_snapshot", + }, + ] + request, metadata = self._interceptor.pre_create_volume_snapshot( + request, metadata + ) + pb_request = gcb_volume_snapshot.CreateVolumeSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcb_volume_snapshot.VolumeSnapshot() + pb_resp = gcb_volume_snapshot.VolumeSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_volume_snapshot(resp) + return resp + + class _DeleteNfsShare(BareMetalSolutionRestStub): + def __hash__(self): + return hash("DeleteNfsShare") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: nfs_share.DeleteNfsShareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete nfs share method over HTTP. + + Args: + request (~.nfs_share.DeleteNfsShareRequest): + The request object. Message for deleting an NFS share. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/nfsShares/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_nfs_share( + request, metadata + ) + pb_request = nfs_share.DeleteNfsShareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_nfs_share(resp) + return resp + + class _DeleteSSHKey(BareMetalSolutionRestStub): + def __hash__(self): + return hash("DeleteSSHKey") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ssh_key.DeleteSSHKeyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete ssh key method over HTTP. + + Args: + request (~.ssh_key.DeleteSSHKeyRequest): + The request object. Message for deleting an SSH key from + a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/sshKeys/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_ssh_key(request, metadata) + pb_request = ssh_key.DeleteSSHKeyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteVolumeSnapshot(BareMetalSolutionRestStub): + def __hash__(self): + return hash("DeleteVolumeSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: volume_snapshot.DeleteVolumeSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete volume snapshot method over HTTP. + + Args: + request (~.volume_snapshot.DeleteVolumeSnapshotRequest): + The request object. 
Message for deleting named Volume + snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_volume_snapshot( + request, metadata + ) + pb_request = volume_snapshot.DeleteVolumeSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DetachLun(BareMetalSolutionRestStub): + def __hash__(self): + return hash("DetachLun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcb_instance.DetachLunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the detach lun method over HTTP. + + Args: + request (~.gcb_instance.DetachLunRequest): + The request object. Message for detach specific LUN from + an Instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{instance=projects/*/locations/*/instances/*}:detachLun", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_detach_lun(request, metadata) + pb_request = gcb_instance.DetachLunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detach_lun(resp) + return resp + + class _DisableInteractiveSerialConsole(BareMetalSolutionRestStub): + def __hash__(self): + return hash("DisableInteractiveSerialConsole") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: instance.DisableInteractiveSerialConsoleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the disable interactive + serial console method over HTTP. + + Args: + request (~.instance.DisableInteractiveSerialConsoleRequest): + The request object. Message for disabling the interactive + serial console on an instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/instances/*}:disableInteractiveSerialConsole", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_disable_interactive_serial_console( + request, metadata + ) + pb_request = instance.DisableInteractiveSerialConsoleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_disable_interactive_serial_console(resp) + return resp + + class _EnableInteractiveSerialConsole(BareMetalSolutionRestStub): + def __hash__(self): + return hash("EnableInteractiveSerialConsole") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: instance.EnableInteractiveSerialConsoleRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the enable interactive serial + console method over HTTP. + + Args: + request (~.instance.EnableInteractiveSerialConsoleRequest): + The request object. Message for enabling the interactive + serial console on an instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/instances/*}:enableInteractiveSerialConsole", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enable_interactive_serial_console( + request, metadata + ) + pb_request = instance.EnableInteractiveSerialConsoleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_enable_interactive_serial_console(resp) + return resp + + class _EvictLun(BareMetalSolutionRestStub): + def __hash__(self): + return hash("EvictLun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: lun.EvictLunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the evict lun method over HTTP. + + Args: + request (~.lun.EvictLunRequest): + The request object. Request for skip lun cooloff and + delete it. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/volumes/*/luns/*}:evict", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_evict_lun(request, metadata) + pb_request = lun.EvictLunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_evict_lun(resp) + return resp + + class _EvictVolume(BareMetalSolutionRestStub): + def __hash__(self): + return hash("EvictVolume") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: volume.EvictVolumeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the evict volume method over HTTP. + + Args: + request (~.volume.EvictVolumeRequest): + The request object. Request for skip volume cooloff and + delete it. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/volumes/*}:evict", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_evict_volume(request, metadata) + pb_request = volume.EvictVolumeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_evict_volume(resp) + return resp + + class _GetInstance(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: instance.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.instance.GetInstanceRequest): + The request object. Message for requesting server + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.instance.Instance: + A server. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/instances/*}", + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = instance.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = instance.Instance() + pb_resp = instance.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _GetLun(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetLun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: lun.GetLunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> lun.Lun: + r"""Call the get lun method over HTTP. + + Args: + request (~.lun.GetLunRequest): + The request object. Message for requesting storage lun + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.lun.Lun: + A storage volume logical unit number + (LUN). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/volumes/*/luns/*}", + }, + ] + request, metadata = self._interceptor.pre_get_lun(request, metadata) + pb_request = lun.GetLunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = lun.Lun() + pb_resp = lun.Lun.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_lun(resp) + return resp + + class _GetNetwork(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetNetwork") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: network.GetNetworkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.Network: + r"""Call the get network method over HTTP. + + Args: + request (~.network.GetNetworkRequest): + The request object. Message for requesting network + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.network.Network: + A Network. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/networks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_network(request, metadata) + pb_request = network.GetNetworkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = network.Network() + pb_resp = network.Network.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_network(resp) + return resp + + class _GetNfsShare(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetNfsShare") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: nfs_share.GetNfsShareRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> nfs_share.NfsShare: + r"""Call the get nfs share method over HTTP. + + Args: + request (~.nfs_share.GetNfsShareRequest): + The request object. Message for requesting NFS share + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.nfs_share.NfsShare: + An NFS share. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/nfsShares/*}", + }, + ] + request, metadata = self._interceptor.pre_get_nfs_share(request, metadata) + pb_request = nfs_share.GetNfsShareRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = nfs_share.NfsShare() + pb_resp = nfs_share.NfsShare.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_nfs_share(resp) + return resp + + class _GetProvisioningConfig(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetProvisioningConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: provisioning.GetProvisioningConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> provisioning.ProvisioningConfig: + r"""Call the get provisioning config method over HTTP. + + Args: + request (~.provisioning.GetProvisioningConfigRequest): + The request object. Request for GetProvisioningConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.provisioning.ProvisioningConfig: + A provisioning configuration. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/provisioningConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_provisioning_config( + request, metadata + ) + pb_request = provisioning.GetProvisioningConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = provisioning.ProvisioningConfig() + pb_resp = provisioning.ProvisioningConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_provisioning_config(resp) + return resp + + class _GetVolume(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetVolume") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: volume.GetVolumeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume.Volume: + r"""Call the get volume method over HTTP. + + Args: + request (~.volume.GetVolumeRequest): + The request object. Message for requesting storage volume + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.volume.Volume: + A storage volume. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/volumes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_volume(request, metadata) + pb_request = volume.GetVolumeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = volume.Volume() + pb_resp = volume.Volume.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_volume(resp) + return resp + + class _GetVolumeSnapshot(BareMetalSolutionRestStub): + def __hash__(self): + return hash("GetVolumeSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: volume_snapshot.GetVolumeSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> volume_snapshot.VolumeSnapshot: + r"""Call the get volume snapshot method over HTTP. + + Args: + request (~.volume_snapshot.GetVolumeSnapshotRequest): + The request object. Message for requesting volume + snapshot information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.volume_snapshot.VolumeSnapshot: + A snapshot of a volume. Only boot + volumes can have snapshots. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}", + }, + ] + request, metadata = self._interceptor.pre_get_volume_snapshot( + request, metadata + ) + pb_request = volume_snapshot.GetVolumeSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = volume_snapshot.VolumeSnapshot() + pb_resp = volume_snapshot.VolumeSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_volume_snapshot(resp) + return resp + + class _ListInstances(BareMetalSolutionRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: instance.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.instance.ListInstancesRequest): + The request object. Message for requesting the list of + servers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.instance.ListInstancesResponse: + Response message for the list of + servers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/instances", + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = instance.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = instance.ListInstancesResponse() + pb_resp = instance.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _ListLuns(BareMetalSolutionRestStub): + def __hash__(self): + return hash("ListLuns") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: lun.ListLunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> lun.ListLunsResponse: + r"""Call the list luns method over HTTP. + + Args: + request (~.lun.ListLunsRequest): + The request object. Message for requesting a list of + storage volume luns. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.lun.ListLunsResponse: + Response message containing the list + of storage volume luns. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*/volumes/*}/luns", + }, + ] + request, metadata = self._interceptor.pre_list_luns(request, metadata) + pb_request = lun.ListLunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = lun.ListLunsResponse() + pb_resp = lun.ListLunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_luns(resp) + return resp + + class _ListNetworks(BareMetalSolutionRestStub): + def __hash__(self): + return hash("ListNetworks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: network.ListNetworksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.ListNetworksResponse: + r"""Call the list networks method over HTTP. + + Args: + request (~.network.ListNetworksRequest): + The request object. Message for requesting a list of + networks. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.network.ListNetworksResponse: + Response message containing the list + of networks. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/networks", + }, + ] + request, metadata = self._interceptor.pre_list_networks(request, metadata) + pb_request = network.ListNetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = network.ListNetworksResponse() + pb_resp = network.ListNetworksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_networks(resp) + return resp + + class _ListNetworkUsage(BareMetalSolutionRestStub): + def __hash__(self): + return hash("ListNetworkUsage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } - url_match_items = maybe_url_match.groupdict() + def __call__( + self, + request: network.ListNetworkUsageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network.ListNetworkUsageResponse: + r"""Call the list network usage method over HTTP. - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Args: + request (~.network.ListNetworkUsageRequest): + The request object. Request to get networks with IPs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
- super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BareMetalSolutionRestInterceptor() - self._prep_wrapped_messages(client_info) + Returns: + ~.network.ListNetworkUsageResponse: + Response with Networks with IPs + """ - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{location=projects/*/locations/*}/networks:listNetworkUsage", + }, + ] + request, metadata = self._interceptor.pre_list_network_usage( + request, metadata + ) + pb_request = network.ListNetworkUsageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = {} + uri = transcoded_request["uri"] + method = transcoded_request["method"] - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v2", + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) ) + query_params.update(self._get_unset_required_fields(query_params)) - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), ) - # Return the client from cache. - return self._operations_client + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - class _DetachLun(BareMetalSolutionRestStub): + # Return the response + resp = network.ListNetworkUsageResponse() + pb_resp = network.ListNetworkUsageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_network_usage(resp) + return resp + + class _ListNfsShares(BareMetalSolutionRestStub): def __hash__(self): - return hash("DetachLun") + return hash("ListNfsShares") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -874,18 +3682,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcb_instance.DetachLunRequest, + request: nfs_share.ListNfsSharesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the detach lun method over HTTP. + ) -> nfs_share.ListNfsSharesResponse: + r"""Call the list nfs shares method over HTTP. Args: - request (~.gcb_instance.DetachLunRequest): - The request object. Message for detach specific LUN from - an Instance. + request (~.nfs_share.ListNfsSharesRequest): + The request object. Message for requesting a list of NFS + shares. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -893,31 +3701,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.nfs_share.ListNfsSharesResponse: + Response message containing the list + of NFS shares. 
""" http_options: List[Dict[str, str]] = [ { - "method": "post", - "uri": "/v2/{instance=projects/*/locations/*/instances/*}:detachLun", - "body": "*", + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/nfsShares", }, ] - request, metadata = self._interceptor.pre_detach_lun(request, metadata) - pb_request = gcb_instance.DetachLunRequest.pb(request) + request, metadata = self._interceptor.pre_list_nfs_shares(request, metadata) + pb_request = nfs_share.ListNfsSharesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, - ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -941,7 +3740,6 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -950,14 +3748,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_detach_lun(resp) + resp = nfs_share.ListNfsSharesResponse() + pb_resp = nfs_share.ListNfsSharesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_nfs_shares(resp) return resp - class _GetInstance(BareMetalSolutionRestStub): + class _ListOSImages(BareMetalSolutionRestStub): def __hash__(self): - return hash("GetInstance") + return hash("ListOSImages") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -971,18 +3771,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: instance.GetInstanceRequest, + request: osimage.ListOSImagesRequest, *, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> instance.Instance: - r"""Call the get instance method over HTTP. + ) -> osimage.ListOSImagesResponse: + r"""Call the list os images method over HTTP. Args: - request (~.instance.GetInstanceRequest): - The request object. Message for requesting server - information. + request (~.osimage.ListOSImagesRequest): + The request object. Request for getting all available OS + images. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -990,18 +3790,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.instance.Instance: - A server. + ~.osimage.ListOSImagesResponse: + Request for getting all available OS + images. + """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v2/{name=projects/*/locations/*/instances/*}", + "uri": "/v2/{parent=projects/*/locations/*}/osImages", }, ] - request, metadata = self._interceptor.pre_get_instance(request, metadata) - pb_request = instance.GetInstanceRequest.pb(request) + request, metadata = self._interceptor.pre_list_os_images(request, metadata) + pb_request = osimage.ListOSImagesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1035,16 +3837,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = instance.Instance() - pb_resp = instance.Instance.pb(resp) + resp = osimage.ListOSImagesResponse() + pb_resp = osimage.ListOSImagesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_instance(resp) + resp = self._interceptor.post_list_os_images(resp) return resp - class _GetLun(BareMetalSolutionRestStub): + class _ListProvisioningQuotas(BareMetalSolutionRestStub): def __hash__(self): - return hash("GetLun") + return 
hash("ListProvisioningQuotas") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1058,18 +3860,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: lun.GetLunRequest, + request: provisioning.ListProvisioningQuotasRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> lun.Lun: - r"""Call the get lun method over HTTP. + ) -> provisioning.ListProvisioningQuotasResponse: + r"""Call the list provisioning quotas method over HTTP. Args: - request (~.lun.GetLunRequest): - The request object. Message for requesting storage lun - information. + request (~.provisioning.ListProvisioningQuotasRequest): + The request object. Message for requesting the list of + provisioning quotas. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1077,20 +3879,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.lun.Lun: - A storage volume logical unit number - (LUN). + ~.provisioning.ListProvisioningQuotasResponse: + Response message for the list of + provisioning quotas. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v2/{name=projects/*/locations/*/volumes/*/luns/*}", + "uri": "/v2/{parent=projects/*/locations/*}/provisioningQuotas", }, ] - request, metadata = self._interceptor.pre_get_lun(request, metadata) - pb_request = lun.GetLunRequest.pb(request) + request, metadata = self._interceptor.pre_list_provisioning_quotas( + request, metadata + ) + pb_request = provisioning.ListProvisioningQuotasRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1124,16 +3928,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = lun.Lun() - pb_resp = lun.Lun.pb(resp) + resp = provisioning.ListProvisioningQuotasResponse() + pb_resp = provisioning.ListProvisioningQuotasResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_lun(resp) + resp = self._interceptor.post_list_provisioning_quotas(resp) return resp - class _GetNetwork(BareMetalSolutionRestStub): + class _ListSSHKeys(BareMetalSolutionRestStub): def __hash__(self): - return hash("GetNetwork") + return hash("ListSSHKeys") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1147,18 +3951,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: network.GetNetworkRequest, + request: ssh_key.ListSSHKeysRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> network.Network: - r"""Call the get network method over HTTP. + ) -> ssh_key.ListSSHKeysResponse: + r"""Call the list ssh keys method over HTTP. Args: - request (~.network.GetNetworkRequest): - The request object. Message for requesting network - information. + request (~.ssh_key.ListSSHKeysRequest): + The request object. Message for listing the public SSH + keys in a project. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1166,18 +3970,18 @@ def __call__( sent along with the request as metadata. Returns: - ~.network.Network: - A Network. + ~.ssh_key.ListSSHKeysResponse: + Message for response of ListSSHKeys. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v2/{name=projects/*/locations/*/networks/*}", + "uri": "/v2/{parent=projects/*/locations/*}/sshKeys", }, ] - request, metadata = self._interceptor.pre_get_network(request, metadata) - pb_request = network.GetNetworkRequest.pb(request) + request, metadata = self._interceptor.pre_list_ssh_keys(request, metadata) + pb_request = ssh_key.ListSSHKeysRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1211,16 +4015,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = network.Network() - pb_resp = network.Network.pb(resp) + resp = ssh_key.ListSSHKeysResponse() + pb_resp = ssh_key.ListSSHKeysResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_network(resp) + resp = self._interceptor.post_list_ssh_keys(resp) return resp - class _GetNfsShare(BareMetalSolutionRestStub): + class _ListVolumes(BareMetalSolutionRestStub): def __hash__(self): - return hash("GetNfsShare") + return hash("ListVolumes") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1234,18 +4038,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: nfs_share.GetNfsShareRequest, + request: volume.ListVolumesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> nfs_share.NfsShare: - r"""Call the get nfs share method over HTTP. 
+ ) -> volume.ListVolumesResponse: + r"""Call the list volumes method over HTTP. Args: - request (~.nfs_share.GetNfsShareRequest): - The request object. Message for requesting NFS share - information. + request (~.volume.ListVolumesRequest): + The request object. Message for requesting a list of + storage volumes. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1253,18 +4057,20 @@ def __call__( sent along with the request as metadata. Returns: - ~.nfs_share.NfsShare: - An NFS share. + ~.volume.ListVolumesResponse: + Response message containing the list + of storage volumes. + """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v2/{name=projects/*/locations/*/nfsShares/*}", + "uri": "/v2/{parent=projects/*/locations/*}/volumes", }, ] - request, metadata = self._interceptor.pre_get_nfs_share(request, metadata) - pb_request = nfs_share.GetNfsShareRequest.pb(request) + request, metadata = self._interceptor.pre_list_volumes(request, metadata) + pb_request = volume.ListVolumesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1298,16 +4104,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = nfs_share.NfsShare() - pb_resp = nfs_share.NfsShare.pb(resp) + resp = volume.ListVolumesResponse() + pb_resp = volume.ListVolumesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_nfs_share(resp) + resp = self._interceptor.post_list_volumes(resp) return resp - class _GetVolume(BareMetalSolutionRestStub): + class _ListVolumeSnapshots(BareMetalSolutionRestStub): def __hash__(self): - return hash("GetVolume") + return hash("ListVolumeSnapshots") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1321,18 +4127,18 @@ def _get_unset_required_fields(cls, message_dict): def 
__call__( self, - request: volume.GetVolumeRequest, + request: volume_snapshot.ListVolumeSnapshotsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> volume.Volume: - r"""Call the get volume method over HTTP. + ) -> volume_snapshot.ListVolumeSnapshotsResponse: + r"""Call the list volume snapshots method over HTTP. Args: - request (~.volume.GetVolumeRequest): - The request object. Message for requesting storage volume - information. + request (~.volume_snapshot.ListVolumeSnapshotsRequest): + The request object. Message for requesting a list of + volume snapshots. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1340,18 +4146,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.volume.Volume: - A storage volume. + ~.volume_snapshot.ListVolumeSnapshotsResponse: + Response message containing the list + of volume snapshots. 
+ """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v2/{name=projects/*/locations/*/volumes/*}", + "uri": "/v2/{parent=projects/*/locations/*/volumes/*}/snapshots", }, - ] - request, metadata = self._interceptor.pre_get_volume(request, metadata) - pb_request = volume.GetVolumeRequest.pb(request) + ] + request, metadata = self._interceptor.pre_list_volume_snapshots( + request, metadata + ) + pb_request = volume_snapshot.ListVolumeSnapshotsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1385,16 +4195,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = volume.Volume() - pb_resp = volume.Volume.pb(resp) + resp = volume_snapshot.ListVolumeSnapshotsResponse() + pb_resp = volume_snapshot.ListVolumeSnapshotsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_volume(resp) + resp = self._interceptor.post_list_volume_snapshots(resp) return resp - class _ListInstances(BareMetalSolutionRestStub): + class _RenameInstance(BareMetalSolutionRestStub): def __hash__(self): - return hash("ListInstances") + return hash("RenameInstance") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1408,18 +4218,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: instance.ListInstancesRequest, + request: instance.RenameInstanceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> instance.ListInstancesResponse: - r"""Call the list instances method over HTTP. + ) -> instance.Instance: + r"""Call the rename instance method over HTTP. Args: - request (~.instance.ListInstancesRequest): - The request object. Message for requesting the list of - servers. + request (~.instance.RenameInstanceRequest): + The request object. 
Message requesting rename of a + server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1427,22 +4237,28 @@ def __call__( sent along with the request as metadata. Returns: - ~.instance.ListInstancesResponse: - Response message for the list of - servers. - + ~.instance.Instance: + A server. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v2/{parent=projects/*/locations/*}/instances", + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/instances/*}:rename", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_instances(request, metadata) - pb_request = instance.ListInstancesRequest.pb(request) + request, metadata = self._interceptor.pre_rename_instance(request, metadata) + pb_request = instance.RenameInstanceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1466,6 +4282,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1474,16 +4291,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = instance.ListInstancesResponse() - pb_resp = instance.ListInstancesResponse.pb(resp) + resp = instance.Instance() + pb_resp = instance.Instance.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_instances(resp) + resp = self._interceptor.post_rename_instance(resp) return resp - class _ListLuns(BareMetalSolutionRestStub): + class 
_RenameNetwork(BareMetalSolutionRestStub): def __hash__(self): - return hash("ListLuns") + return hash("RenameNetwork") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1497,18 +4314,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: lun.ListLunsRequest, + request: network.RenameNetworkRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> lun.ListLunsResponse: - r"""Call the list luns method over HTTP. + ) -> network.Network: + r"""Call the rename network method over HTTP. Args: - request (~.lun.ListLunsRequest): - The request object. Message for requesting a list of - storage volume luns. + request (~.network.RenameNetworkRequest): + The request object. Message requesting rename of a + server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1516,22 +4333,28 @@ def __call__( sent along with the request as metadata. Returns: - ~.lun.ListLunsResponse: - Response message containing the list - of storage volume luns. - + ~.network.Network: + A Network. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v2/{parent=projects/*/locations/*/volumes/*}/luns", + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/networks/*}:rename", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_luns(request, metadata) - pb_request = lun.ListLunsRequest.pb(request) + request, metadata = self._interceptor.pre_rename_network(request, metadata) + pb_request = network.RenameNetworkRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1555,6 +4378,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1563,16 +4387,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = lun.ListLunsResponse() - pb_resp = lun.ListLunsResponse.pb(resp) + resp = network.Network() + pb_resp = network.Network.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_luns(resp) + resp = self._interceptor.post_rename_network(resp) return resp - class _ListNetworks(BareMetalSolutionRestStub): + class _RenameNfsShare(BareMetalSolutionRestStub): def __hash__(self): - return hash("ListNetworks") + return hash("RenameNfsShare") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1586,18 +4410,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: network.ListNetworksRequest, + request: nfs_share.RenameNfsShareRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: 
Sequence[Tuple[str, str]] = (), - ) -> network.ListNetworksResponse: - r"""Call the list networks method over HTTP. + ) -> nfs_share.NfsShare: + r"""Call the rename nfs share method over HTTP. Args: - request (~.network.ListNetworksRequest): - The request object. Message for requesting a list of - networks. + request (~.nfs_share.RenameNfsShareRequest): + The request object. Message requesting rename of a + server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1605,22 +4429,30 @@ def __call__( sent along with the request as metadata. Returns: - ~.network.ListNetworksResponse: - Response message containing the list - of networks. - + ~.nfs_share.NfsShare: + An NFS share. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v2/{parent=projects/*/locations/*}/networks", + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/nfsShares/*}:rename", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_networks(request, metadata) - pb_request = network.ListNetworksRequest.pb(request) + request, metadata = self._interceptor.pre_rename_nfs_share( + request, metadata + ) + pb_request = nfs_share.RenameNfsShareRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1644,6 +4476,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1652,16 +4485,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = network.ListNetworksResponse() - pb_resp = 
network.ListNetworksResponse.pb(resp) + resp = nfs_share.NfsShare() + pb_resp = nfs_share.NfsShare.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_networks(resp) + resp = self._interceptor.post_rename_nfs_share(resp) return resp - class _ListNetworkUsage(BareMetalSolutionRestStub): + class _RenameVolume(BareMetalSolutionRestStub): def __hash__(self): - return hash("ListNetworkUsage") + return hash("RenameVolume") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1675,17 +4508,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: network.ListNetworkUsageRequest, + request: volume.RenameVolumeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> network.ListNetworkUsageResponse: - r"""Call the list network usage method over HTTP. + ) -> volume.Volume: + r"""Call the rename volume method over HTTP. Args: - request (~.network.ListNetworkUsageRequest): - The request object. Request to get networks with IPs. + request (~.volume.RenameVolumeRequest): + The request object. Message requesting rename of a + server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1693,22 +4527,28 @@ def __call__( sent along with the request as metadata. Returns: - ~.network.ListNetworkUsageResponse: - Response with Networks with IPs + ~.volume.Volume: + A storage volume. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v2/{location=projects/*/locations/*}/networks:listNetworkUsage", + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/volumes/*}:rename", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_network_usage( - request, metadata - ) - pb_request = network.ListNetworkUsageRequest.pb(request) + request, metadata = self._interceptor.pre_rename_volume(request, metadata) + pb_request = volume.RenameVolumeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1732,6 +4572,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1740,16 +4581,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = network.ListNetworkUsageResponse() - pb_resp = network.ListNetworkUsageResponse.pb(resp) + resp = volume.Volume() + pb_resp = volume.Volume.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_network_usage(resp) + resp = self._interceptor.post_rename_volume(resp) return resp - class _ListNfsShares(BareMetalSolutionRestStub): + class _ResetInstance(BareMetalSolutionRestStub): def __hash__(self): - return hash("ListNfsShares") + return hash("ResetInstance") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1763,18 +4604,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: nfs_share.ListNfsSharesRequest, + request: instance.ResetInstanceRequest, *, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> nfs_share.ListNfsSharesResponse: - r"""Call the list nfs shares method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the reset instance method over HTTP. Args: - request (~.nfs_share.ListNfsSharesRequest): - The request object. Message for requesting a list of NFS - shares. + request (~.instance.ResetInstanceRequest): + The request object. Message requesting to reset a server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1782,22 +4622,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.nfs_share.ListNfsSharesResponse: - Response message containing the list - of NFS shares. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v2/{parent=projects/*/locations/*}/nfsShares", + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/instances/*}:reset", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_nfs_shares(request, metadata) - pb_request = nfs_share.ListNfsSharesRequest.pb(request) + request, metadata = self._interceptor.pre_reset_instance(request, metadata) + pb_request = instance.ResetInstanceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1821,6 +4670,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ 
-1829,16 +4679,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = nfs_share.ListNfsSharesResponse() - pb_resp = nfs_share.ListNfsSharesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_nfs_shares(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reset_instance(resp) return resp - class _ListVolumes(BareMetalSolutionRestStub): + class _ResizeVolume(BareMetalSolutionRestStub): def __hash__(self): - return hash("ListVolumes") + return hash("ResizeVolume") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1852,18 +4700,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: volume.ListVolumesRequest, + request: gcb_volume.ResizeVolumeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> volume.ListVolumesResponse: - r"""Call the list volumes method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the resize volume method over HTTP. Args: - request (~.volume.ListVolumesRequest): - The request object. Message for requesting a list of - storage volumes. + request (~.gcb_volume.ResizeVolumeRequest): + The request object. Request for emergency resize Volume. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1871,22 +4718,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.volume.ListVolumesResponse: - Response message containing the list - of storage volumes. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v2/{parent=projects/*/locations/*}/volumes", + "method": "post", + "uri": "/v2/{volume=projects/*/locations/*/volumes/*}:resize", + "body": "*", }, ] - request, metadata = self._interceptor.pre_list_volumes(request, metadata) - pb_request = volume.ListVolumesRequest.pb(request) + request, metadata = self._interceptor.pre_resize_volume(request, metadata) + pb_request = gcb_volume.ResizeVolumeRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1910,6 +4766,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1918,16 +4775,14 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = volume.ListVolumesResponse() - pb_resp = volume.ListVolumesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_volumes(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize_volume(resp) return resp - class _ResetInstance(BareMetalSolutionRestStub): + class _RestoreVolumeSnapshot(BareMetalSolutionRestStub): def __hash__(self): - return hash("ResetInstance") + return hash("RestoreVolumeSnapshot") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -1941,17 +4796,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: instance.ResetInstanceRequest, + request: gcb_volume_snapshot.RestoreVolumeSnapshotRequest, *, 
retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the reset instance method over HTTP. + r"""Call the restore volume snapshot method over HTTP. Args: - request (~.instance.ResetInstanceRequest): - The request object. Message requesting to reset a server. + request (~.gcb_volume_snapshot.RestoreVolumeSnapshotRequest): + The request object. Message for restoring a volume + snapshot. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1969,12 +4825,14 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v2/{name=projects/*/locations/*/instances/*}:reset", + "uri": "/v2/{volume_snapshot=projects/*/locations/*/volumes/*/snapshots/*}:restoreVolumeSnapshot", "body": "*", }, ] - request, metadata = self._interceptor.pre_reset_instance(request, metadata) - pb_request = instance.ResetInstanceRequest.pb(request) + request, metadata = self._interceptor.pre_restore_volume_snapshot( + request, metadata + ) + pb_request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -2018,12 +4876,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_reset_instance(resp) + resp = self._interceptor.post_restore_volume_snapshot(resp) return resp - class _ResizeVolume(BareMetalSolutionRestStub): + class _StartInstance(BareMetalSolutionRestStub): def __hash__(self): - return hash("ResizeVolume") + return hash("StartInstance") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2037,17 +4895,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: gcb_volume.ResizeVolumeRequest, + request: 
instance.StartInstanceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the resize volume method over HTTP. + r"""Call the start instance method over HTTP. Args: - request (~.gcb_volume.ResizeVolumeRequest): - The request object. Request for emergency resize Volume. + request (~.instance.StartInstanceRequest): + The request object. Message requesting to start a server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2065,12 +4923,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v2/{volume=projects/*/locations/*/volumes/*}:resize", + "uri": "/v2/{name=projects/*/locations/*/instances/*}:start", "body": "*", }, ] - request, metadata = self._interceptor.pre_resize_volume(request, metadata) - pb_request = gcb_volume.ResizeVolumeRequest.pb(request) + request, metadata = self._interceptor.pre_start_instance(request, metadata) + pb_request = instance.StartInstanceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -2114,12 +4972,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_resize_volume(resp) + resp = self._interceptor.post_start_instance(resp) return resp - class _StartInstance(BareMetalSolutionRestStub): + class _StopInstance(BareMetalSolutionRestStub): def __hash__(self): - return hash("StartInstance") + return hash("StopInstance") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2133,17 +4991,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: instance.StartInstanceRequest, + request: instance.StopInstanceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, 
timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the start instance method over HTTP. + r"""Call the stop instance method over HTTP. Args: - request (~.instance.StartInstanceRequest): - The request object. Message requesting to start a server. + request (~.instance.StopInstanceRequest): + The request object. Message requesting to stop a server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2161,12 +5019,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v2/{name=projects/*/locations/*/instances/*}:start", + "uri": "/v2/{name=projects/*/locations/*/instances/*}:stop", "body": "*", }, ] - request, metadata = self._interceptor.pre_start_instance(request, metadata) - pb_request = instance.StartInstanceRequest.pb(request) + request, metadata = self._interceptor.pre_stop_instance(request, metadata) + pb_request = instance.StopInstanceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -2210,12 +5068,12 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_start_instance(resp) + resp = self._interceptor.post_stop_instance(resp) return resp - class _StopInstance(BareMetalSolutionRestStub): + class _SubmitProvisioningConfig(BareMetalSolutionRestStub): def __hash__(self): - return hash("StopInstance") + return hash("SubmitProvisioningConfig") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -2229,40 +5087,42 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: instance.StopInstanceRequest, + request: provisioning.SubmitProvisioningConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: 
Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the stop instance method over HTTP. - - Args: - request (~.instance.StopInstanceRequest): - The request object. Message requesting to stop a server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ) -> provisioning.SubmitProvisioningConfigResponse: + r"""Call the submit provisioning + config method over HTTP. + + Args: + request (~.provisioning.SubmitProvisioningConfigRequest): + The request object. Request for SubmitProvisioningConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.provisioning.SubmitProvisioningConfigResponse: + Response for + SubmitProvisioningConfig. 
""" http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v2/{name=projects/*/locations/*/instances/*}:stop", + "uri": "/v2/{parent=projects/*/locations/*}/provisioningConfigs:submit", "body": "*", }, ] - request, metadata = self._interceptor.pre_stop_instance(request, metadata) - pb_request = instance.StopInstanceRequest.pb(request) + request, metadata = self._interceptor.pre_submit_provisioning_config( + request, metadata + ) + pb_request = provisioning.SubmitProvisioningConfigRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -2304,9 +5164,11 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_stop_instance(resp) + resp = provisioning.SubmitProvisioningConfigResponse() + pb_resp = provisioning.SubmitProvisioningConfigResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_provisioning_config(resp) return resp class _UpdateInstance(BareMetalSolutionRestStub): @@ -2529,7 +5391,7 @@ def __call__( Args: request (~.gcb_nfs_share.UpdateNfsShareRequest): - The request object. Message requesting to updating a NFS + The request object. Message requesting to updating an NFS share. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -2602,6 +5464,107 @@ def __call__( resp = self._interceptor.post_update_nfs_share(resp) return resp + class _UpdateProvisioningConfig(BareMetalSolutionRestStub): + def __hash__(self): + return hash("UpdateProvisioningConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: provisioning.UpdateProvisioningConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> provisioning.ProvisioningConfig: + r"""Call the update provisioning + config method over HTTP. + + Args: + request (~.provisioning.UpdateProvisioningConfigRequest): + The request object. Message for updating a + ProvisioningConfig. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.provisioning.ProvisioningConfig: + A provisioning configuration. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{provisioning_config.name=projects/*/locations/*/provisioningConfigs/*}", + "body": "provisioning_config", + }, + ] + request, metadata = self._interceptor.pre_update_provisioning_config( + request, metadata + ) + pb_request = provisioning.UpdateProvisioningConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = provisioning.ProvisioningConfig() + pb_resp = provisioning.ProvisioningConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_provisioning_config(resp) + return resp + class _UpdateVolume(BareMetalSolutionRestStub): def __hash__(self): return hash("UpdateVolume") @@ -2698,6 +5661,67 @@ def __call__( resp = self._interceptor.post_update_volume(resp) return resp + @property + def create_nfs_share( + self, + ) -> Callable[[gcb_nfs_share.CreateNfsShareRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateNfsShare(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_provisioning_config( + self, + ) -> Callable[ + [provisioning.CreateProvisioningConfigRequest], provisioning.ProvisioningConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_ssh_key( + self, + ) -> Callable[[gcb_ssh_key.CreateSSHKeyRequest], gcb_ssh_key.SSHKey]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSSHKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.CreateVolumeSnapshotRequest], + gcb_volume_snapshot.VolumeSnapshot, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_nfs_share( + self, + ) -> Callable[[nfs_share.DeleteNfsShareRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteNfsShare(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_ssh_key( + self, + ) -> Callable[[ssh_key.DeleteSSHKeyRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSSHKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_volume_snapshot( + self, + ) -> Callable[[volume_snapshot.DeleteVolumeSnapshotRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def detach_lun( self, @@ -2706,6 +5730,40 @@ def detach_lun( # In C++ this would require a dynamic_cast return self._DetachLun(self._session, self._host, self._interceptor) # type: ignore + @property + def disable_interactive_serial_console( + self, + ) -> Callable[ + [instance.DisableInteractiveSerialConsoleRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DisableInteractiveSerialConsole(self._session, self._host, self._interceptor) # type: ignore + + @property + def enable_interactive_serial_console( + self, + ) -> Callable[ + [instance.EnableInteractiveSerialConsoleRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnableInteractiveSerialConsole(self._session, self._host, self._interceptor) # type: ignore + + @property + def evict_lun(self) -> Callable[[lun.EvictLunRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EvictLun(self._session, self._host, self._interceptor) # type: ignore + + @property + def evict_volume( + self, + ) -> Callable[[volume.EvictVolumeRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EvictVolume(self._session, self._host, self._interceptor) # type: ignore + @property def get_instance( self, @@ -2734,12 +5792,32 @@ def get_nfs_share( # In C++ this would require a dynamic_cast return self._GetNfsShare(self._session, self._host, self._interceptor) # type: ignore + @property + def get_provisioning_config( + self, + ) -> Callable[ + [provisioning.GetProvisioningConfigRequest], provisioning.ProvisioningConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore + @property def get_volume(self) -> Callable[[volume.GetVolumeRequest], volume.Volume]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetVolume(self._session, self._host, self._interceptor) # type: ignore + @property + def get_volume_snapshot( + self, + ) -> Callable[ + [volume_snapshot.GetVolumeSnapshotRequest], volume_snapshot.VolumeSnapshot + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def list_instances( self, @@ -2778,6 +5856,33 @@ def list_nfs_shares( # In C++ this would require a dynamic_cast return self._ListNfsShares(self._session, self._host, self._interceptor) # type: ignore + @property + def list_os_images( + self, + ) -> Callable[[osimage.ListOSImagesRequest], osimage.ListOSImagesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOSImages(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_provisioning_quotas( + self, + ) -> Callable[ + [provisioning.ListProvisioningQuotasRequest], + provisioning.ListProvisioningQuotasResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListProvisioningQuotas(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_ssh_keys( + self, + ) -> Callable[[ssh_key.ListSSHKeysRequest], ssh_key.ListSSHKeysResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSSHKeys(self._session, self._host, self._interceptor) # type: ignore + @property def list_volumes( self, @@ -2786,6 +5891,47 @@ def list_volumes( # In C++ this would require a dynamic_cast return self._ListVolumes(self._session, self._host, self._interceptor) # type: ignore + @property + def list_volume_snapshots( + self, + ) -> Callable[ + [volume_snapshot.ListVolumeSnapshotsRequest], + volume_snapshot.ListVolumeSnapshotsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVolumeSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def rename_instance( + self, + ) -> Callable[[instance.RenameInstanceRequest], instance.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RenameInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def rename_network( + self, + ) -> Callable[[network.RenameNetworkRequest], network.Network]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RenameNetwork(self._session, self._host, self._interceptor) # type: ignore + + @property + def rename_nfs_share( + self, + ) -> Callable[[nfs_share.RenameNfsShareRequest], nfs_share.NfsShare]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RenameNfsShare(self._session, self._host, self._interceptor) # type: ignore + + @property + def rename_volume(self) -> Callable[[volume.RenameVolumeRequest], volume.Volume]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RenameVolume(self._session, self._host, self._interceptor) # type: ignore + @property def reset_instance( self, @@ -2802,6 +5948,16 @@ def resize_volume( # In C++ this would require a dynamic_cast return self._ResizeVolume(self._session, self._host, self._interceptor) # type: ignore + @property + def restore_volume_snapshot( + self, + ) -> Callable[ + [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def start_instance( self, @@ -2818,6 +5974,17 @@ def stop_instance( # In C++ this would require a dynamic_cast return self._StopInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def submit_provisioning_config( + self, + ) -> Callable[ + [provisioning.SubmitProvisioningConfigRequest], + provisioning.SubmitProvisioningConfigResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SubmitProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore + @property def update_instance( self, @@ -2842,6 +6009,16 @@ def update_nfs_share( # In C++ this would require a dynamic_cast return self._UpdateNfsShare(self._session, self._host, self._interceptor) # type: ignore + @property + def update_provisioning_config( + self, + ) -> Callable[ + [provisioning.UpdateProvisioningConfigRequest], provisioning.ProvisioningConfig + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore + @property def update_volume( self, diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/__init__.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/__init__.py index a0b3d4838bdd..d6961e69f495 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/__init__.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/__init__.py @@ -16,10 +16,15 @@ from .baremetalsolution import OperationMetadata, ResetInstanceResponse from .instance import ( DetachLunRequest, + DisableInteractiveSerialConsoleRequest, + DisableInteractiveSerialConsoleResponse, + EnableInteractiveSerialConsoleRequest, + EnableInteractiveSerialConsoleResponse, GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, + RenameInstanceRequest, ResetInstanceRequest, ServerNetworkTemplate, StartInstanceRequest, @@ -28,7 +33,7 @@ StopInstanceResponse, UpdateInstanceRequest, ) -from .lun import GetLunRequest, ListLunsRequest, ListLunsResponse, Lun +from .lun import EvictLunRequest, GetLunRequest, ListLunsRequest, ListLunsResponse, Lun from .network import ( VRF, GetNetworkRequest, @@ 
-39,33 +44,79 @@ LogicalInterface, Network, NetworkAddressReservation, + NetworkMountPoint, NetworkUsage, + RenameNetworkRequest, UpdateNetworkRequest, ) from .nfs_share import ( + CreateNfsShareRequest, + DeleteNfsShareRequest, GetNfsShareRequest, ListNfsSharesRequest, ListNfsSharesResponse, NfsShare, + RenameNfsShareRequest, UpdateNfsShareRequest, ) +from .osimage import ListOSImagesRequest, ListOSImagesResponse, OSImage +from .provisioning import ( + CreateProvisioningConfigRequest, + GetProvisioningConfigRequest, + InstanceConfig, + InstanceQuota, + ListProvisioningQuotasRequest, + ListProvisioningQuotasResponse, + NetworkConfig, + ProvisioningConfig, + ProvisioningQuota, + SubmitProvisioningConfigRequest, + SubmitProvisioningConfigResponse, + UpdateProvisioningConfigRequest, + VolumeConfig, +) +from .ssh_key import ( + CreateSSHKeyRequest, + DeleteSSHKeyRequest, + ListSSHKeysRequest, + ListSSHKeysResponse, + SSHKey, +) from .volume import ( + EvictVolumeRequest, GetVolumeRequest, ListVolumesRequest, ListVolumesResponse, + RenameVolumeRequest, ResizeVolumeRequest, UpdateVolumeRequest, Volume, ) +from .volume_snapshot import ( + CreateVolumeSnapshotRequest, + DeleteVolumeSnapshotRequest, + GetVolumeSnapshotRequest, + ListVolumeSnapshotsRequest, + ListVolumeSnapshotsResponse, + RestoreVolumeSnapshotRequest, + VolumeSnapshot, +) __all__ = ( "OperationMetadata", "ResetInstanceResponse", + "VolumePerformanceTier", + "WorkloadProfile", "DetachLunRequest", + "DisableInteractiveSerialConsoleRequest", + "DisableInteractiveSerialConsoleResponse", + "EnableInteractiveSerialConsoleRequest", + "EnableInteractiveSerialConsoleResponse", "GetInstanceRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", + "RenameInstanceRequest", "ResetInstanceRequest", "ServerNetworkTemplate", "StartInstanceRequest", @@ -73,6 +124,7 @@ "StopInstanceRequest", "StopInstanceResponse", "UpdateInstanceRequest", + "EvictLunRequest", "GetLunRequest", "ListLunsRequest", 
"ListLunsResponse", @@ -85,18 +137,53 @@ "LogicalInterface", "Network", "NetworkAddressReservation", + "NetworkMountPoint", "NetworkUsage", + "RenameNetworkRequest", "UpdateNetworkRequest", "VRF", + "CreateNfsShareRequest", + "DeleteNfsShareRequest", "GetNfsShareRequest", "ListNfsSharesRequest", "ListNfsSharesResponse", "NfsShare", + "RenameNfsShareRequest", "UpdateNfsShareRequest", + "ListOSImagesRequest", + "ListOSImagesResponse", + "OSImage", + "CreateProvisioningConfigRequest", + "GetProvisioningConfigRequest", + "InstanceConfig", + "InstanceQuota", + "ListProvisioningQuotasRequest", + "ListProvisioningQuotasResponse", + "NetworkConfig", + "ProvisioningConfig", + "ProvisioningQuota", + "SubmitProvisioningConfigRequest", + "SubmitProvisioningConfigResponse", + "UpdateProvisioningConfigRequest", + "VolumeConfig", + "CreateSSHKeyRequest", + "DeleteSSHKeyRequest", + "ListSSHKeysRequest", + "ListSSHKeysResponse", + "SSHKey", + "EvictVolumeRequest", "GetVolumeRequest", "ListVolumesRequest", "ListVolumesResponse", + "RenameVolumeRequest", "ResizeVolumeRequest", "UpdateVolumeRequest", "Volume", + "CreateVolumeSnapshotRequest", + "DeleteVolumeSnapshotRequest", + "GetVolumeSnapshotRequest", + "ListVolumeSnapshotsRequest", + "ListVolumeSnapshotsResponse", + "RestoreVolumeSnapshotRequest", + "VolumeSnapshot", ) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py index 6f8fac9a6b01..ba7b4cb68437 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py @@ -34,25 +34,29 @@ class OperationMetadata(proto.Message): Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the operation was created. + Output only. 
The time the operation was + created. end_time (google.protobuf.timestamp_pb2.Timestamp): - The time the operation finished running. + Output only. The time the operation finished + running. target (str): - Server-defined resource path for the target - of the operation. + Output only. Server-defined resource path for + the target of the operation. verb (str): - Name of the action executed by the operation. + Output only. Name of the action executed by + the operation. status_message (str): - Human-readable status of the operation, if - any. + Output only. Human-readable status of the + operation, if any. requested_cancellation (bool): - Identifies whether the user requested the cancellation of - the operation. Operations that have been successfully - cancelled have [Operation.error][] value with a - [google.rpc.Status.code][] of 1, corresponding to - ``Code.CANCELLED``. + Output only. Identifies whether the user requested the + cancellation of the operation. Operations that have been + successfully cancelled have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. api_version (str): - API version used with the operation. + Output only. API version used with the + operation. """ create_time: timestamp_pb2.Timestamp = proto.Field( diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/common.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/common.py new file mode 100644 index 000000000000..f3465c7ae9ab --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/common.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.baremetalsolution.v2", + manifest={ + "VolumePerformanceTier", + "WorkloadProfile", + }, +) + + +class VolumePerformanceTier(proto.Enum): + r"""Performance tier of the Volume. + + Values: + VOLUME_PERFORMANCE_TIER_UNSPECIFIED (0): + Value is not specified. + VOLUME_PERFORMANCE_TIER_SHARED (1): + Regular volumes, shared aggregates. + VOLUME_PERFORMANCE_TIER_ASSIGNED (2): + Assigned aggregates. + VOLUME_PERFORMANCE_TIER_HT (3): + High throughput aggregates. + """ + VOLUME_PERFORMANCE_TIER_UNSPECIFIED = 0 + VOLUME_PERFORMANCE_TIER_SHARED = 1 + VOLUME_PERFORMANCE_TIER_ASSIGNED = 2 + VOLUME_PERFORMANCE_TIER_HT = 3 + + +class WorkloadProfile(proto.Enum): + r"""The possible values for a workload profile. + + Values: + WORKLOAD_PROFILE_UNSPECIFIED (0): + The workload profile is in an unknown state. + WORKLOAD_PROFILE_GENERIC (1): + The workload profile is generic. + WORKLOAD_PROFILE_HANA (2): + The workload profile is hana. 
+ """ + WORKLOAD_PROFILE_UNSPECIFIED = 0 + WORKLOAD_PROFILE_GENERIC = 1 + WORKLOAD_PROFILE_HANA = 2 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/instance.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/instance.py index ba8a8a24345c..85961119f234 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/instance.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/instance.py @@ -21,8 +21,9 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.bare_metal_solution_v2.types import common from google.cloud.bare_metal_solution_v2.types import lun as gcb_lun -from google.cloud.bare_metal_solution_v2.types import network +from google.cloud.bare_metal_solution_v2.types import network, volume __protobuf__ = proto.module( package="google.cloud.baremetalsolution.v2", @@ -32,13 +33,18 @@ "ListInstancesRequest", "ListInstancesResponse", "UpdateInstanceRequest", + "RenameInstanceRequest", "ResetInstanceRequest", "StartInstanceRequest", + "StartInstanceResponse", "StopInstanceRequest", + "StopInstanceResponse", + "EnableInteractiveSerialConsoleRequest", + "EnableInteractiveSerialConsoleResponse", + "DisableInteractiveSerialConsoleRequest", + "DisableInteractiveSerialConsoleResponse", "DetachLunRequest", "ServerNetworkTemplate", - "StartInstanceResponse", - "StopInstanceResponse", }, ) @@ -48,23 +54,22 @@ class Instance(proto.Message): Attributes: name (str): - Output only. The resource name of this ``Instance``. - Resource names are schemeless URIs that follow the - conventions in + Immutable. The resource name of this ``Instance``. Resource + names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. 
Format: ``projects/{project}/locations/{location}/instances/{instance}`` id (str): - An identifier for the ``Instance``, generated by the - backend. + Output only. An identifier for the ``Instance``, generated + by the backend. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Create a time stamp. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Update a time stamp. machine_type (str): - The server type. `Available server + Immutable. The server type. `Available server types `__ state (google.cloud.bare_metal_solution_v2.types.Instance.State): - The state of the server. + Output only. The state of the server. hyperthreading_enabled (bool): True if you enable hyperthreading for the server, otherwise false. The default value is @@ -72,13 +77,19 @@ class Instance(proto.Message): labels (MutableMapping[str, str]): Labels as key value pairs. luns (MutableSequence[google.cloud.bare_metal_solution_v2.types.Lun]): - List of LUNs associated with this server. + Immutable. List of LUNs associated with this + server. + volumes (MutableSequence[google.cloud.bare_metal_solution_v2.types.Volume]): + Input only. List of Volumes to attach to this + Instance on creation. This field won't be + populated in Get/List responses. networks (MutableSequence[google.cloud.bare_metal_solution_v2.types.Network]): - List of networks associated with this server. + Output only. List of networks associated with + this server. interactive_serial_console_enabled (bool): - True if the interactive serial console - feature is enabled for the instance, false - otherwise. The default value is false. + Output only. True if the interactive serial + console feature is enabled for the instance, + false otherwise. The default value is false. os_image (str): The OS image currently installed on the server. @@ -102,6 +113,14 @@ class Instance(proto.Message): Instance.logical_interfaces fields will be filled to ensure backward compatibility. 
For the others, only Instance.logical_interfaces will be filled. + login_info (str): + Output only. Text field about info for + logging in. + workload_profile (google.cloud.bare_metal_solution_v2.types.WorkloadProfile): + The workload profile for the instance. + firmware_version (str): + Output only. The firmware version for the + instance. """ class State(proto.Enum): @@ -116,11 +135,23 @@ class State(proto.Enum): The server is running. DELETED (3): The server has been deleted. + UPDATING (4): + The server is being updated. + STARTING (5): + The server is starting. + STOPPING (6): + The server is stopping. + SHUTDOWN (7): + The server is shutdown. """ STATE_UNSPECIFIED = 0 PROVISIONING = 1 RUNNING = 2 DELETED = 3 + UPDATING = 4 + STARTING = 5 + STOPPING = 6 + SHUTDOWN = 7 name: str = proto.Field( proto.STRING, @@ -163,6 +194,11 @@ class State(proto.Enum): number=8, message=gcb_lun.Lun, ) + volumes: MutableSequence[volume.Volume] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message=volume.Volume, + ) networks: MutableSequence[network.Network] = proto.RepeatedField( proto.MESSAGE, number=9, @@ -189,6 +225,19 @@ class State(proto.Enum): number=15, message=network.LogicalInterface, ) + login_info: str = proto.Field( + proto.STRING, + number=17, + ) + workload_profile: common.WorkloadProfile = proto.Field( + proto.ENUM, + number=18, + enum=common.WorkloadProfile, + ) + firmware_version: str = proto.Field( + proto.STRING, + number=19, + ) class GetInstanceRequest(proto.Message): @@ -300,6 +349,28 @@ class UpdateInstanceRequest(proto.Message): ) +class RenameInstanceRequest(proto.Message): + r"""Message requesting rename of a server. + + Attributes: + name (str): + Required. The ``name`` field is used to identify the + instance. Format: + projects/{project}/locations/{location}/instances/{instance} + new_instance_id (str): + Required. The new ``id`` of the instance. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + + class ResetInstanceRequest(proto.Message): r"""Message requesting to reset a server. @@ -328,6 +399,10 @@ class StartInstanceRequest(proto.Message): ) +class StartInstanceResponse(proto.Message): + r"""Response message from starting a server.""" + + class StopInstanceRequest(proto.Message): r"""Message requesting to stop a server. @@ -342,6 +417,48 @@ class StopInstanceRequest(proto.Message): ) +class StopInstanceResponse(proto.Message): + r"""Response message from stopping a server.""" + + +class EnableInteractiveSerialConsoleRequest(proto.Message): + r"""Message for enabling the interactive serial console on an + instance. + + Attributes: + name (str): + Required. Name of the resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EnableInteractiveSerialConsoleResponse(proto.Message): + r"""Message for response of EnableInteractiveSerialConsole.""" + + +class DisableInteractiveSerialConsoleRequest(proto.Message): + r"""Message for disabling the interactive serial console on an + instance. + + Attributes: + name (str): + Required. Name of the resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DisableInteractiveSerialConsoleResponse(proto.Message): + r"""Message for response of DisableInteractiveSerialConsole.""" + + class DetachLunRequest(proto.Message): r"""Message for detach specific LUN from an Instance. @@ -350,6 +467,9 @@ class DetachLunRequest(proto.Message): Required. Name of the instance. lun (str): Required. Name of the Lun to detach. + skip_reboot (bool): + If true, performs lun unmapping without + instance reboot. 
""" instance: str = proto.Field( @@ -360,6 +480,10 @@ class DetachLunRequest(proto.Message): proto.STRING, number=2, ) + skip_reboot: bool = proto.Field( + proto.BOOL, + number=3, + ) class ServerNetworkTemplate(proto.Message): @@ -441,12 +565,4 @@ class InterfaceType(proto.Enum): ) -class StartInstanceResponse(proto.Message): - r"""Response message from starting a server.""" - - -class StopInstanceResponse(proto.Message): - r"""Response message from stopping a server.""" - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/lun.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/lun.py index 8f593790c9b9..a2d79a0760ea 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/lun.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/lun.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -26,6 +27,7 @@ "GetLunRequest", "ListLunsRequest", "ListLunsResponse", + "EvictLunRequest", }, ) @@ -58,6 +60,12 @@ class Lun(proto.Message): The storage type for this LUN. wwid (str): The WWID for this LUN. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time after which LUN will be fully deleted. It + is filled only for LUNs in COOL_OFF state. + instances (MutableSequence[str]): + Output only. Instances this Lun is attached + to. """ class State(proto.Enum): @@ -74,12 +82,16 @@ class State(proto.Enum): The LUN is ready for use. DELETING (4): The LUN has been requested to be deleted. + COOL_OFF (5): + The LUN is in cool off state. It will be deleted after + ``expire_time``. 
""" STATE_UNSPECIFIED = 0 CREATING = 1 UPDATING = 2 READY = 3 DELETING = 4 + COOL_OFF = 5 class MultiprotocolType(proto.Enum): r"""Display the operating systems present for the LUN @@ -152,6 +164,15 @@ class StorageType(proto.Enum): proto.STRING, number=9, ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=12, + ) class GetLunRequest(proto.Message): @@ -229,4 +250,18 @@ def raw_page(self): ) +class EvictLunRequest(proto.Message): + r"""Request for skip lun cooloff and delete it. + + Attributes: + name (str): + Required. The name of the lun. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/network.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/network.py index 93cff7a73166..ad4ceaebecac 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/network.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/network.py @@ -34,6 +34,8 @@ "NetworkUsage", "ListNetworkUsageRequest", "ListNetworkUsageResponse", + "NetworkMountPoint", + "RenameNetworkRequest", }, ) @@ -74,6 +76,16 @@ class Network(proto.Message): be generated if a reservation conflicts with an IP address already allocated to a physical server. + pod (str): + Output only. Pod name. + mount_points (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkMountPoint]): + Input only. List of mount points to attach + the network to. + jumbo_frames_enabled (bool): + Whether network uses standard frames or jumbo + ones. + gateway_ip (str): + Output only. Gateway ip address. 
""" class Type(proto.Enum): @@ -103,10 +115,16 @@ class State(proto.Enum): The Network is provisioning. PROVISIONED (2): The Network has been provisioned. + DEPROVISIONING (3): + The Network is being deprovisioned. + UPDATING (4): + The Network is being updated. """ STATE_UNSPECIFIED = 0 PROVISIONING = 1 PROVISIONED = 2 + DEPROVISIONING = 3 + UPDATING = 4 name: str = proto.Field( proto.STRING, @@ -161,6 +179,23 @@ class State(proto.Enum): number=13, message="NetworkAddressReservation", ) + pod: str = proto.Field( + proto.STRING, + number=14, + ) + mount_points: MutableSequence["NetworkMountPoint"] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="NetworkMountPoint", + ) + jumbo_frames_enabled: bool = proto.Field( + proto.BOOL, + number=16, + ) + gateway_ip: str = proto.Field( + proto.STRING, + number=17, + ) class NetworkAddressReservation(proto.Message): @@ -205,6 +240,11 @@ class VRF(proto.Message): The possible state of VRF. qos_policy (google.cloud.bare_metal_solution_v2.types.VRF.QosPolicy): The QOS policy applied to this VRF. + The value is only meaningful when all the vlan + attachments have the same QoS. This field should + not be used for new integrations, use vlan + attachment level qos instead. The field is left + for backward-compatibility. vlan_attachments (MutableSequence[google.cloud.bare_metal_solution_v2.types.VRF.VlanAttachment]): The list of VLAN attachments for the VRF. """ @@ -248,6 +288,19 @@ class VlanAttachment(proto.Message): The peer IP of the attachment. router_ip (str): The router IP of the attachment. + pairing_key (str): + Input only. Pairing key. + qos_policy (google.cloud.bare_metal_solution_v2.types.VRF.QosPolicy): + The QOS policy applied to this VLAN + attachment. This value should be preferred to + using qos at vrf level. + id (str): + Immutable. The identifier of the attachment + within vrf. + interconnect_attachment (str): + Optional. The name of the vlan attachment within vrf. 
This + is of the form + projects/{project_number}/regions/{region}/interconnectAttachments/{interconnect_attachment} """ peer_vlan_id: int = proto.Field( @@ -262,6 +315,23 @@ class VlanAttachment(proto.Message): proto.STRING, number=3, ) + pairing_key: str = proto.Field( + proto.STRING, + number=4, + ) + qos_policy: "VRF.QosPolicy" = proto.Field( + proto.MESSAGE, + number=5, + message="VRF.QosPolicy", + ) + id: str = proto.Field( + proto.STRING, + number=6, + ) + interconnect_attachment: str = proto.Field( + proto.STRING, + number=7, + ) name: str = proto.Field( proto.STRING, @@ -301,7 +371,8 @@ class LogicalInterface(proto.Message): interface_index (int): The index of the logical interface mapping to the index of the hardware bond or nic on the - chosen network template. + chosen network template. This field is + deprecated. """ class LogicalNetworkInterface(proto.Message): @@ -456,7 +527,8 @@ class UpdateNetworkRequest(proto.Message): projects/{project}/locations/{location}/networks/{network} update_mask (google.protobuf.field_mask_pb2.FieldMask): The list of fields to update. The only currently supported - fields are: ``labels``, ``reservations`` + fields are: ``labels``, ``reservations``, + ``vrf.vlan_attachments`` """ network: "Network" = proto.Field( @@ -522,4 +594,58 @@ class ListNetworkUsageResponse(proto.Message): ) +class NetworkMountPoint(proto.Message): + r"""Mount point for a network. + + Attributes: + instance (str): + Instance to attach network to. + logical_interface (str): + Logical interface to detach from. + default_gateway (bool): + Network should be a default gateway. + ip_address (str): + Ip address of the server. 
+ """ + + instance: str = proto.Field( + proto.STRING, + number=1, + ) + logical_interface: str = proto.Field( + proto.STRING, + number=2, + ) + default_gateway: bool = proto.Field( + proto.BOOL, + number=3, + ) + ip_address: str = proto.Field( + proto.STRING, + number=4, + ) + + +class RenameNetworkRequest(proto.Message): + r"""Message requesting rename of a server. + + Attributes: + name (str): + Required. The ``name`` field is used to identify the + network. Format: + projects/{project}/locations/{location}/networks/{network} + new_network_id (str): + Required. The new ``id`` of the network. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_network_id: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/nfs_share.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/nfs_share.py index db73d2c3412b..72123b7a4425 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/nfs_share.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/nfs_share.py @@ -28,6 +28,9 @@ "ListNfsSharesRequest", "ListNfsSharesResponse", "UpdateNfsShareRequest", + "RenameNfsShareRequest", + "CreateNfsShareRequest", + "DeleteNfsShareRequest", }, ) @@ -37,18 +40,30 @@ class NfsShare(proto.Message): Attributes: name (str): - Output only. The name of the NFS share. + Immutable. The name of the NFS share. nfs_share_id (str): - Output only. An identifier for the NFS share, - generated by the backend. + Output only. An identifier for the NFS share, generated by + the backend. This field will be deprecated in the future, + use ``id`` instead. + id (str): + Output only. An identifier for the NFS share, generated by + the backend. This is the same value as nfs_share_id and will + replace it in the future. 
state (google.cloud.bare_metal_solution_v2.types.NfsShare.State): - The state of the NFS share. + Output only. The state of the NFS share. volume (str): - The volume containing the share. + Output only. The underlying volume of the + share. Created automatically during + provisioning. allowed_clients (MutableSequence[google.cloud.bare_metal_solution_v2.types.NfsShare.AllowedClient]): List of allowed access points. labels (MutableMapping[str, str]): Labels as key value pairs. + requested_size_gib (int): + The requested size, in GiB. + storage_type (google.cloud.bare_metal_solution_v2.types.NfsShare.StorageType): + Immutable. The storage type of the underlying + volume. """ class State(proto.Enum): @@ -59,9 +74,19 @@ class State(proto.Enum): The share is in an unknown state. PROVISIONED (1): The share has been provisioned. + CREATING (2): + The NFS Share is being created. + UPDATING (3): + The NFS Share is being updated. + DELETING (4): + The NFS Share has been requested to be + deleted. """ STATE_UNSPECIFIED = 0 PROVISIONED = 1 + CREATING = 2 + UPDATING = 3 + DELETING = 4 class MountPermissions(proto.Enum): r"""The possible mount permissions. @@ -80,6 +105,21 @@ class MountPermissions(proto.Enum): READ = 1 READ_WRITE = 2 + class StorageType(proto.Enum): + r"""The storage type for a volume. + + Values: + STORAGE_TYPE_UNSPECIFIED (0): + The storage type for this volume is unknown. + SSD (1): + The storage type for this volume is SSD. + HDD (2): + This storage type for this volume is HDD. + """ + STORAGE_TYPE_UNSPECIFIED = 0 + SSD = 1 + HDD = 2 + class AllowedClient(proto.Message): r"""Represents an 'access point' for the share. @@ -87,7 +127,9 @@ class AllowedClient(proto.Message): network (str): The network the access point sits on. share_ip (str): - The IP address of the share on this network. + Output only. The IP address of the share on this network. + Assigned automatically during provisioning based on the + network's services_cidr. 
allowed_clients_cidr (str): The subnet of IP addresses permitted to access the share. @@ -103,6 +145,12 @@ class AllowedClient(proto.Message): NFS. Root squash is a special mapping of the remote superuser (root) identity when using identity authentication. + nfs_path (str): + Output only. The path to access NFS, in + format shareIP:/InstanceID + InstanceID is the generated ID instead of + customer provided name. example like + "10.0.0.0:/g123456789-nfs001". """ network: str = proto.Field( @@ -134,6 +182,10 @@ class AllowedClient(proto.Message): proto.BOOL, number=7, ) + nfs_path: str = proto.Field( + proto.STRING, + number=8, + ) name: str = proto.Field( proto.STRING, @@ -143,6 +195,10 @@ class AllowedClient(proto.Message): proto.STRING, number=2, ) + id: str = proto.Field( + proto.STRING, + number=8, + ) state: State = proto.Field( proto.ENUM, number=3, @@ -162,6 +218,15 @@ class AllowedClient(proto.Message): proto.STRING, number=6, ) + requested_size_gib: int = proto.Field( + proto.INT64, + number=7, + ) + storage_type: StorageType = proto.Field( + proto.ENUM, + number=9, + enum=StorageType, + ) class GetNfsShareRequest(proto.Message): @@ -247,7 +312,7 @@ def raw_page(self): class UpdateNfsShareRequest(proto.Message): - r"""Message requesting to updating a NFS share. + r"""Message requesting to updating an NFS share. Attributes: nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): @@ -258,7 +323,7 @@ class UpdateNfsShareRequest(proto.Message): projects/{project}/locations/{location}/nfsShares/{nfs_share} update_mask (google.protobuf.field_mask_pb2.FieldMask): The list of fields to update. The only currently supported - fields are: ``labels`` + fields are: ``labels`` ``allowed_clients`` """ nfs_share: "NfsShare" = proto.Field( @@ -273,4 +338,62 @@ class UpdateNfsShareRequest(proto.Message): ) +class RenameNfsShareRequest(proto.Message): + r"""Message requesting rename of a server. + + Attributes: + name (str): + Required. 
The ``name`` field is used to identify the + nfsshare. Format: + projects/{project}/locations/{location}/nfsshares/{nfsshare} + new_nfsshare_id (str): + Required. The new ``id`` of the nfsshare. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_nfsshare_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateNfsShareRequest(proto.Message): + r"""Message for creating an NFS share. + + Attributes: + parent (str): + Required. The parent project and location. + nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): + Required. The NfsShare to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + nfs_share: "NfsShare" = proto.Field( + proto.MESSAGE, + number=2, + message="NfsShare", + ) + + +class DeleteNfsShareRequest(proto.Message): + r"""Message for deleting an NFS share. + + Attributes: + name (str): + Required. The name of the NFS share to + delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/osimage.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/osimage.py new file mode 100644 index 000000000000..c085aac56f6c --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/osimage.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.baremetalsolution.v2", + manifest={ + "OSImage", + "ListOSImagesRequest", + "ListOSImagesResponse", + }, +) + + +class OSImage(proto.Message): + r"""Operation System image. + + Attributes: + name (str): + Output only. OS Image's unique name. + code (str): + OS Image code. + description (str): + OS Image description. + applicable_instance_types (MutableSequence[str]): + Instance types this image is applicable to. `Available + types `__ + supported_network_templates (MutableSequence[str]): + Network templates that can be used with this + OS Image. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + code: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + applicable_instance_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + supported_network_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class ListOSImagesRequest(proto.Message): + r"""Request for getting all available OS images. + + Attributes: + parent (str): + Required. Parent value for + ListProvisioningQuotasRequest. + page_size (int): + Requested page size. The server might return fewer items + than requested. If unspecified, server will pick an + appropriate default. Notice that page_size field is not + supported and won't be respected in the API request for now, + will be updated when pagination is supported. + page_token (str): + A token identifying a page of results from + the server. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListOSImagesResponse(proto.Message): + r"""Request for getting all available OS images. + + Attributes: + os_images (MutableSequence[google.cloud.bare_metal_solution_v2.types.OSImage]): + The OS images available. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + os_images: MutableSequence["OSImage"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OSImage", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/provisioning.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/provisioning.py new file mode 100644 index 000000000000..2b297c90d3b9 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/provisioning.py @@ -0,0 +1,1054 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.bare_metal_solution_v2.types import common, network + +__protobuf__ = proto.module( + package="google.cloud.baremetalsolution.v2", + manifest={ + "ProvisioningConfig", + "SubmitProvisioningConfigRequest", + "SubmitProvisioningConfigResponse", + "ProvisioningQuota", + "ListProvisioningQuotasRequest", + "ListProvisioningQuotasResponse", + "InstanceConfig", + "VolumeConfig", + "NetworkConfig", + "InstanceQuota", + "GetProvisioningConfigRequest", + "CreateProvisioningConfigRequest", + "UpdateProvisioningConfigRequest", + }, +) + + +class ProvisioningConfig(proto.Message): + r"""A provisioning configuration. + + Attributes: + name (str): + Output only. The system-generated name of the + provisioning config. This follows the UUID + format. + instances (MutableSequence[google.cloud.bare_metal_solution_v2.types.InstanceConfig]): + Instances to be created. + networks (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkConfig]): + Networks to be created. + volumes (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeConfig]): + Volumes to be created. + ticket_id (str): + A generated ticket id to track provisioning + request. + handover_service_account (str): + A service account to enable customers to + access instance credentials upon handover. + email (str): + Email provided to send a confirmation with + provisioning config to. Deprecated in favour of + email field in request messages. + state (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig.State): + Output only. State of ProvisioningConfig. + location (str): + Optional. Location name of this + ProvisioningConfig. It is optional only for + Intake UI transition period. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update timestamp. + cloud_console_uri (str): + Output only. URI to Cloud Console UI view of + this provisioning config. + vpc_sc_enabled (bool): + If true, VPC SC is enabled for the cluster. + status_message (str): + Optional status messages associated with the + FAILED state. + custom_id (str): + Optional. The user-defined identifier of the + provisioning config. + """ + + class State(proto.Enum): + r"""The possible states for this ProvisioningConfig. + + Values: + STATE_UNSPECIFIED (0): + State wasn't specified. + DRAFT (1): + ProvisioningConfig is a draft and can be + freely modified. + SUBMITTED (2): + ProvisioningConfig was already submitted and + cannot be modified. + PROVISIONING (3): + ProvisioningConfig was in the provisioning + state. Initially this state comes from the work + order table in big query when SNOW is used. + Later this field can be set by the work order + API. + PROVISIONED (4): + ProvisioningConfig was provisioned, meaning + the resources exist. + VALIDATED (5): + ProvisioningConfig was validated. A + validation tool will be run to set this state. + CANCELLED (6): + ProvisioningConfig was canceled. + FAILED (7): + The request is submitted for provisioning, + with error return. 
+ """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + SUBMITTED = 2 + PROVISIONING = 3 + PROVISIONED = 4 + VALIDATED = 5 + CANCELLED = 6 + FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + instances: MutableSequence["InstanceConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="InstanceConfig", + ) + networks: MutableSequence["NetworkConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="NetworkConfig", + ) + volumes: MutableSequence["VolumeConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="VolumeConfig", + ) + ticket_id: str = proto.Field( + proto.STRING, + number=5, + ) + handover_service_account: str = proto.Field( + proto.STRING, + number=6, + ) + email: str = proto.Field( + proto.STRING, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=8, + enum=State, + ) + location: str = proto.Field( + proto.STRING, + number=9, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + cloud_console_uri: str = proto.Field( + proto.STRING, + number=11, + ) + vpc_sc_enabled: bool = proto.Field( + proto.BOOL, + number=12, + ) + status_message: str = proto.Field( + proto.STRING, + number=13, + ) + custom_id: str = proto.Field( + proto.STRING, + number=14, + ) + + +class SubmitProvisioningConfigRequest(proto.Message): + r"""Request for SubmitProvisioningConfig. + + Attributes: + parent (str): + Required. The parent project and location + containing the ProvisioningConfig. + provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + Required. The ProvisioningConfig to create. + email (str): + Optional. Email provided to send a + confirmation with provisioning config to. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + provisioning_config: "ProvisioningConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ProvisioningConfig", + ) + email: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SubmitProvisioningConfigResponse(proto.Message): + r"""Response for SubmitProvisioningConfig. + + Attributes: + provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + The submitted provisioning config. + """ + + provisioning_config: "ProvisioningConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ProvisioningConfig", + ) + + +class ProvisioningQuota(proto.Message): + r"""A provisioning quota for a given project. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The name of the provisioning + quota. + asset_type (google.cloud.bare_metal_solution_v2.types.ProvisioningQuota.AssetType): + The asset type of this provisioning quota. + gcp_service (str): + The gcp service of the provisioning quota. + location (str): + The specific location of the provisioining + quota. + available_count (int): + The available count of the provisioning + quota. + instance_quota (google.cloud.bare_metal_solution_v2.types.InstanceQuota): + Instance quota. + + This field is a member of `oneof`_ ``quota``. + server_count (int): + Server count. + + This field is a member of `oneof`_ ``availability``. + network_bandwidth (int): + Network bandwidth, Gbps + + This field is a member of `oneof`_ ``availability``. + storage_gib (int): + Storage size (GB). + + This field is a member of `oneof`_ ``availability``. 
+ """ + + class AssetType(proto.Enum): + r"""The available asset types for intake. + + Values: + ASSET_TYPE_UNSPECIFIED (0): + The unspecified type. + ASSET_TYPE_SERVER (1): + The server asset type. + ASSET_TYPE_STORAGE (2): + The storage asset type. + ASSET_TYPE_NETWORK (3): + The network asset type. + """ + ASSET_TYPE_UNSPECIFIED = 0 + ASSET_TYPE_SERVER = 1 + ASSET_TYPE_STORAGE = 2 + ASSET_TYPE_NETWORK = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + asset_type: AssetType = proto.Field( + proto.ENUM, + number=2, + enum=AssetType, + ) + gcp_service: str = proto.Field( + proto.STRING, + number=3, + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + available_count: int = proto.Field( + proto.INT32, + number=5, + ) + instance_quota: "InstanceQuota" = proto.Field( + proto.MESSAGE, + number=6, + oneof="quota", + message="InstanceQuota", + ) + server_count: int = proto.Field( + proto.INT64, + number=7, + oneof="availability", + ) + network_bandwidth: int = proto.Field( + proto.INT64, + number=8, + oneof="availability", + ) + storage_gib: int = proto.Field( + proto.INT64, + number=9, + oneof="availability", + ) + + +class ListProvisioningQuotasRequest(proto.Message): + r"""Message for requesting the list of provisioning quotas. + + Attributes: + parent (str): + Required. Parent value for + ListProvisioningQuotasRequest. + page_size (int): + Requested page size. The server might return fewer items + than requested. If unspecified, server will pick an + appropriate default. Notice that page_size field is not + supported and won't be respected in the API request for now, + will be updated when pagination is supported. + page_token (str): + A token identifying a page of results from + the server. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListProvisioningQuotasResponse(proto.Message): + r"""Response message for the list of provisioning quotas. + + Attributes: + provisioning_quotas (MutableSequence[google.cloud.bare_metal_solution_v2.types.ProvisioningQuota]): + The provisioning quotas registered in this + project. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + provisioning_quotas: MutableSequence["ProvisioningQuota"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ProvisioningQuota", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class InstanceConfig(proto.Message): + r"""Configuration parameters for a new instance. + + Attributes: + name (str): + Output only. The name of the instance config. + id (str): + A transient unique identifier to idenfity an + instance within an ProvisioningConfig request. + instance_type (str): + Instance type. `Available + types `__ + hyperthreading (bool): + Whether the instance should be provisioned + with Hyperthreading enabled. + os_image (str): + OS image to initialize the instance. `Available + images `__ + client_network (google.cloud.bare_metal_solution_v2.types.InstanceConfig.NetworkAddress): + Client network address. Filled if + InstanceConfig.multivlan_config is false. + private_network (google.cloud.bare_metal_solution_v2.types.InstanceConfig.NetworkAddress): + Private network address, if any. Filled if + InstanceConfig.multivlan_config is false. + user_note (str): + User note field, it can be used by customers + to add additional information for the BMS Ops + team . 
+ account_networks_enabled (bool): + If true networks can be from different + projects of the same vendor account. + network_config (google.cloud.bare_metal_solution_v2.types.InstanceConfig.NetworkConfig): + The type of network configuration on the + instance. + network_template (str): + Server network template name. Filled if + InstanceConfig.multivlan_config is true. + logical_interfaces (MutableSequence[google.cloud.bare_metal_solution_v2.types.LogicalInterface]): + List of logical interfaces for the instance. The number of + logical interfaces will be the same as number of hardware + bond/nic on the chosen network template. Filled if + InstanceConfig.multivlan_config is true. + ssh_key_names (MutableSequence[str]): + List of names of ssh keys used to provision + the instance. + """ + + class NetworkConfig(proto.Enum): + r"""The network configuration of the instance. + + Values: + NETWORKCONFIG_UNSPECIFIED (0): + The unspecified network configuration. + SINGLE_VLAN (1): + Instance part of single client network and + single private network. + MULTI_VLAN (2): + Instance part of multiple (or single) client + networks and private networks. + """ + NETWORKCONFIG_UNSPECIFIED = 0 + SINGLE_VLAN = 1 + MULTI_VLAN = 2 + + class NetworkAddress(proto.Message): + r"""A network. + + Attributes: + network_id (str): + Id of the network to use, within the same + ProvisioningConfig request. + address (str): + IPv4 address to be assigned to the server. + existing_network_id (str): + Name of the existing network to use. 
+ """ + + network_id: str = proto.Field( + proto.STRING, + number=1, + ) + address: str = proto.Field( + proto.STRING, + number=2, + ) + existing_network_id: str = proto.Field( + proto.STRING, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + instance_type: str = proto.Field( + proto.STRING, + number=3, + ) + hyperthreading: bool = proto.Field( + proto.BOOL, + number=4, + ) + os_image: str = proto.Field( + proto.STRING, + number=5, + ) + client_network: NetworkAddress = proto.Field( + proto.MESSAGE, + number=6, + message=NetworkAddress, + ) + private_network: NetworkAddress = proto.Field( + proto.MESSAGE, + number=7, + message=NetworkAddress, + ) + user_note: str = proto.Field( + proto.STRING, + number=8, + ) + account_networks_enabled: bool = proto.Field( + proto.BOOL, + number=9, + ) + network_config: NetworkConfig = proto.Field( + proto.ENUM, + number=10, + enum=NetworkConfig, + ) + network_template: str = proto.Field( + proto.STRING, + number=11, + ) + logical_interfaces: MutableSequence[network.LogicalInterface] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=network.LogicalInterface, + ) + ssh_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + + +class VolumeConfig(proto.Message): + r"""Configuration parameters for a new volume. + + Attributes: + name (str): + Output only. The name of the volume config. + id (str): + A transient unique identifier to identify a + volume within an ProvisioningConfig request. + snapshots_enabled (bool): + Whether snapshots should be enabled. + type_ (google.cloud.bare_metal_solution_v2.types.VolumeConfig.Type): + The type of this Volume. + protocol (google.cloud.bare_metal_solution_v2.types.VolumeConfig.Protocol): + Volume protocol. + size_gb (int): + The requested size of this volume, in GB. 
+ lun_ranges (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeConfig.LunRange]): + LUN ranges to be configured. Set only when protocol is + PROTOCOL_FC. + machine_ids (MutableSequence[str]): + Machine ids connected to this volume. Set only when protocol + is PROTOCOL_FC. + nfs_exports (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeConfig.NfsExport]): + NFS exports. Set only when protocol is PROTOCOL_NFS. + user_note (str): + User note field, it can be used by customers + to add additional information for the BMS Ops + team . + gcp_service (str): + The GCP service of the storage volume. Available gcp_service + are in + https://cloud.google.com/bare-metal/docs/bms-planning. + performance_tier (google.cloud.bare_metal_solution_v2.types.VolumePerformanceTier): + Performance tier of the Volume. + Default is SHARED. + """ + + class Type(proto.Enum): + r"""The types of Volumes. + + Values: + TYPE_UNSPECIFIED (0): + The unspecified type. + FLASH (1): + This Volume is on flash. + DISK (2): + This Volume is on disk. + """ + TYPE_UNSPECIFIED = 0 + FLASH = 1 + DISK = 2 + + class Protocol(proto.Enum): + r"""The protocol used to access the volume. + + Values: + PROTOCOL_UNSPECIFIED (0): + Unspecified value. + PROTOCOL_FC (1): + Fibre channel. + PROTOCOL_NFS (2): + Network file system. + """ + PROTOCOL_UNSPECIFIED = 0 + PROTOCOL_FC = 1 + PROTOCOL_NFS = 2 + + class LunRange(proto.Message): + r"""A LUN(Logical Unit Number) range. + + Attributes: + quantity (int): + Number of LUNs to create. + size_gb (int): + The requested size of each LUN, in GB. + """ + + quantity: int = proto.Field( + proto.INT32, + number=1, + ) + size_gb: int = proto.Field( + proto.INT32, + number=2, + ) + + class NfsExport(proto.Message): + r"""A NFS export entry. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. 
+ + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_id (str): + Network to use to publish the export. + machine_id (str): + Either a single machine, identified by an ID, + or a comma-separated list of machine IDs. + + This field is a member of `oneof`_ ``client``. + cidr (str): + A CIDR range. + + This field is a member of `oneof`_ ``client``. + permissions (google.cloud.bare_metal_solution_v2.types.VolumeConfig.NfsExport.Permissions): + Export permissions. + no_root_squash (bool): + Disable root squashing, which is a feature of + NFS. Root squash is a special mapping of the + remote superuser (root) identity when using + identity authentication. + allow_suid (bool): + Allow the setuid flag. + allow_dev (bool): + Allow dev flag in NfsShare + AllowedClientsRequest. + """ + + class Permissions(proto.Enum): + r"""Permissions that can granted for an export. + + Values: + PERMISSIONS_UNSPECIFIED (0): + Unspecified value. + READ_ONLY (1): + Read-only permission. + READ_WRITE (2): + Read-write permission. 
+ """ + PERMISSIONS_UNSPECIFIED = 0 + READ_ONLY = 1 + READ_WRITE = 2 + + network_id: str = proto.Field( + proto.STRING, + number=1, + ) + machine_id: str = proto.Field( + proto.STRING, + number=2, + oneof="client", + ) + cidr: str = proto.Field( + proto.STRING, + number=3, + oneof="client", + ) + permissions: "VolumeConfig.NfsExport.Permissions" = proto.Field( + proto.ENUM, + number=4, + enum="VolumeConfig.NfsExport.Permissions", + ) + no_root_squash: bool = proto.Field( + proto.BOOL, + number=5, + ) + allow_suid: bool = proto.Field( + proto.BOOL, + number=6, + ) + allow_dev: bool = proto.Field( + proto.BOOL, + number=7, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + snapshots_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + type_: Type = proto.Field( + proto.ENUM, + number=4, + enum=Type, + ) + protocol: Protocol = proto.Field( + proto.ENUM, + number=5, + enum=Protocol, + ) + size_gb: int = proto.Field( + proto.INT32, + number=6, + ) + lun_ranges: MutableSequence[LunRange] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=LunRange, + ) + machine_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + nfs_exports: MutableSequence[NfsExport] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=NfsExport, + ) + user_note: str = proto.Field( + proto.STRING, + number=10, + ) + gcp_service: str = proto.Field( + proto.STRING, + number=11, + ) + performance_tier: common.VolumePerformanceTier = proto.Field( + proto.ENUM, + number=12, + enum=common.VolumePerformanceTier, + ) + + +class NetworkConfig(proto.Message): + r"""Configuration parameters for a new network. + + Attributes: + name (str): + Output only. The name of the network config. + id (str): + A transient unique identifier to identify a + volume within an ProvisioningConfig request. 
+ type_ (google.cloud.bare_metal_solution_v2.types.NetworkConfig.Type): + The type of this network, either Client or + Private. + bandwidth (google.cloud.bare_metal_solution_v2.types.NetworkConfig.Bandwidth): + Interconnect bandwidth. Set only when type is + CLIENT. + vlan_attachments (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkConfig.IntakeVlanAttachment]): + List of VLAN attachments. As of now there are + always 2 attachments, but it is going to change + in the future (multi vlan). + cidr (str): + CIDR range of the network. + service_cidr (google.cloud.bare_metal_solution_v2.types.NetworkConfig.ServiceCidr): + Service CIDR, if any. + user_note (str): + User note field, it can be used by customers + to add additional information for the BMS Ops + team . + gcp_service (str): + The GCP service of the network. Available gcp_service are in + https://cloud.google.com/bare-metal/docs/bms-planning. + vlan_same_project (bool): + Whether the VLAN attachment pair is located + in the same project. + jumbo_frames_enabled (bool): + The JumboFramesEnabled option for customer to + set. + """ + + class Type(proto.Enum): + r"""Network type. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified value. + CLIENT (1): + Client network, that is a network peered to a + GCP VPC. + PRIVATE (2): + Private network, that is a network local to + the BMS POD. + """ + TYPE_UNSPECIFIED = 0 + CLIENT = 1 + PRIVATE = 2 + + class Bandwidth(proto.Enum): + r"""Interconnect bandwidth. + + Values: + BANDWIDTH_UNSPECIFIED (0): + Unspecified value. + BW_1_GBPS (1): + 1 Gbps. + BW_2_GBPS (2): + 2 Gbps. + BW_5_GBPS (3): + 5 Gbps. + BW_10_GBPS (4): + 10 Gbps. + """ + BANDWIDTH_UNSPECIFIED = 0 + BW_1_GBPS = 1 + BW_2_GBPS = 2 + BW_5_GBPS = 3 + BW_10_GBPS = 4 + + class ServiceCidr(proto.Enum): + r"""Service network block. + + Values: + SERVICE_CIDR_UNSPECIFIED (0): + Unspecified value. + DISABLED (1): + Services are disabled for the given network. 
+ HIGH_26 (2): + Use the highest /26 block of the network to + host services. + HIGH_27 (3): + Use the highest /27 block of the network to + host services. + HIGH_28 (4): + Use the highest /28 block of the network to + host services. + """ + SERVICE_CIDR_UNSPECIFIED = 0 + DISABLED = 1 + HIGH_26 = 2 + HIGH_27 = 3 + HIGH_28 = 4 + + class IntakeVlanAttachment(proto.Message): + r"""A GCP vlan attachment. + + Attributes: + id (str): + Identifier of the VLAN attachment. + pairing_key (str): + Attachment pairing key. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + pairing_key: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + type_: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + bandwidth: Bandwidth = proto.Field( + proto.ENUM, + number=4, + enum=Bandwidth, + ) + vlan_attachments: MutableSequence[IntakeVlanAttachment] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=IntakeVlanAttachment, + ) + cidr: str = proto.Field( + proto.STRING, + number=6, + ) + service_cidr: ServiceCidr = proto.Field( + proto.ENUM, + number=7, + enum=ServiceCidr, + ) + user_note: str = proto.Field( + proto.STRING, + number=8, + ) + gcp_service: str = proto.Field( + proto.STRING, + number=9, + ) + vlan_same_project: bool = proto.Field( + proto.BOOL, + number=10, + ) + jumbo_frames_enabled: bool = proto.Field( + proto.BOOL, + number=11, + ) + + +class InstanceQuota(proto.Message): + r"""A resource budget. + + Attributes: + name (str): + Output only. The name of the instance quota. + instance_type (str): + Instance type. Deprecated: use gcp_service. + gcp_service (str): + The gcp service of the provisioning quota. + location (str): + Location where the quota applies. + available_machine_count (int): + Number of machines than can be created for the given + location and instance_type. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + instance_type: str = proto.Field( + proto.STRING, + number=2, + ) + gcp_service: str = proto.Field( + proto.STRING, + number=5, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + available_machine_count: int = proto.Field( + proto.INT32, + number=4, + ) + + +class GetProvisioningConfigRequest(proto.Message): + r"""Request for GetProvisioningConfig. + + Attributes: + name (str): + Required. Name of the ProvisioningConfig. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateProvisioningConfigRequest(proto.Message): + r"""Request for CreateProvisioningConfig. + + Attributes: + parent (str): + Required. The parent project and location + containing the ProvisioningConfig. + provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + Required. The ProvisioningConfig to create. + email (str): + Optional. Email provided to send a + confirmation with provisioning config to. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + provisioning_config: "ProvisioningConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ProvisioningConfig", + ) + email: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateProvisioningConfigRequest(proto.Message): + r"""Message for updating a ProvisioningConfig. + + Attributes: + provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): + Required. The ProvisioningConfig to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + email (str): + Optional. Email provided to send a + confirmation with provisioning config to. 
+ """ + + provisioning_config: "ProvisioningConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ProvisioningConfig", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + email: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/ssh_key.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/ssh_key.py new file mode 100644 index 000000000000..b80e9cd8cf12 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/ssh_key.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.baremetalsolution.v2", + manifest={ + "SSHKey", + "ListSSHKeysRequest", + "ListSSHKeysResponse", + "CreateSSHKeyRequest", + "DeleteSSHKeyRequest", + }, +) + + +class SSHKey(proto.Message): + r"""An SSH key, used for authorizing with the interactive serial + console feature. + + Attributes: + name (str): + Output only. The name of this SSH key. + Currently, the only valid value for the location + is "global". 
+ public_key (str): + The public SSH key. This must be in OpenSSH .authorized_keys + format. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + public_key: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSSHKeysRequest(proto.Message): + r"""Message for listing the public SSH keys in a project. + + Attributes: + parent (str): + Required. The parent containing the SSH keys. + Currently, the only valid value for the location + is "global". + page_size (int): + The maximum number of items to return. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSSHKeysResponse(proto.Message): + r"""Message for response of ListSSHKeys. + + Attributes: + ssh_keys (MutableSequence[google.cloud.bare_metal_solution_v2.types.SSHKey]): + The SSH keys registered in the project. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + ssh_keys: MutableSequence["SSHKey"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SSHKey", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=90, + ) + + +class CreateSSHKeyRequest(proto.Message): + r"""Message for registering a public SSH key in a project. + + Attributes: + parent (str): + Required. The parent containing the SSH keys. + ssh_key (google.cloud.bare_metal_solution_v2.types.SSHKey): + Required. The SSH key to register. + ssh_key_id (str): + Required. The ID to use for the key, which will become the + final component of the key's resource name. 
+ + This value must match the regex: [a-zA-Z0-9@.-_]{1,64} + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + ssh_key: "SSHKey" = proto.Field( + proto.MESSAGE, + number=2, + message="SSHKey", + ) + ssh_key_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteSSHKeyRequest(proto.Message): + r"""Message for deleting an SSH key from a project. + + Attributes: + name (str): + Required. The name of the SSH key to delete. + Currently, the only valid value for the location + is "global". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume.py index 62cd03dd5fa1..0591ba668d55 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume.py @@ -18,8 +18,11 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.bare_metal_solution_v2.types import common + __protobuf__ = proto.module( package="google.cloud.baremetalsolution.v2", manifest={ @@ -28,6 +31,8 @@ "ListVolumesRequest", "ListVolumesResponse", "UpdateVolumeRequest", + "RenameVolumeRequest", + "EvictVolumeRequest", "ResizeVolumeRequest", }, ) @@ -51,6 +56,8 @@ class Volume(proto.Message): requested_size_gib (int): The requested size of this storage volume, in GiB. + originally_requested_size_gib (int): + Originally requested size, in GiB. current_size_gib (int): The current size of this storage volume, in GiB, including space reserved for snapshots. 
@@ -60,6 +67,9 @@ class Volume(proto.Message): emergency_size_gib (int): Additional emergency size that was requested for this Volume, in GiB. current_size_gib includes this value. + max_size_gib (int): + Maximum size volume can be expanded to in + case of evergency, in GiB. auto_grown_size_gib (int): The size, in GiB, that this storage volume has expanded as a result of an auto grow policy. @@ -80,6 +90,33 @@ class Volume(proto.Message): Whether snapshots are enabled. pod (str): Immutable. Pod name. + protocol (google.cloud.bare_metal_solution_v2.types.Volume.Protocol): + Output only. Storage protocol for the Volume. + boot_volume (bool): + Output only. Whether this volume is a boot + volume. A boot volume is one which contains a + boot LUN. + performance_tier (google.cloud.bare_metal_solution_v2.types.VolumePerformanceTier): + Immutable. Performance tier of the Volume. + Default is SHARED. + notes (str): + Input only. User-specified notes for new + Volume. Used to provision Volumes that require + manual intervention. + workload_profile (google.cloud.bare_metal_solution_v2.types.Volume.WorkloadProfile): + The workload profile for the volume. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time after which volume will be + fully deleted. It is filled only for volumes in + COOLOFF state. + instances (MutableSequence[str]): + Output only. Instances this Volume is + attached to. This field is set only in Get + requests. + attached (bool): + Output only. Is the Volume attached at at least one + instance. This field is a lightweight counterpart of + ``instances`` field. It is filled in List responses as well. """ class StorageType(proto.Enum): @@ -110,11 +147,18 @@ class State(proto.Enum): DELETING (3): The storage volume has been requested to be deleted. + UPDATING (4): + The storage volume is being updated. + COOL_OFF (5): + The storage volume is in cool off state. It will be deleted + after ``expire_time``. 
""" STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 DELETING = 3 + UPDATING = 4 + COOL_OFF = 5 class SnapshotAutoDeleteBehavior(proto.Enum): r"""The kinds of auto delete behavior to use when snapshot @@ -137,6 +181,38 @@ class SnapshotAutoDeleteBehavior(proto.Enum): OLDEST_FIRST = 2 NEWEST_FIRST = 3 + class Protocol(proto.Enum): + r"""Storage protocol. + + Values: + PROTOCOL_UNSPECIFIED (0): + Value is not specified. + FIBRE_CHANNEL (1): + Fibre Channel protocol. + NFS (2): + NFS protocol means Volume is a NFS Share + volume. Such volumes cannot be manipulated via + Volumes API. + """ + PROTOCOL_UNSPECIFIED = 0 + FIBRE_CHANNEL = 1 + NFS = 2 + + class WorkloadProfile(proto.Enum): + r"""The possible values for a workload profile. + + Values: + WORKLOAD_PROFILE_UNSPECIFIED (0): + The workload profile is in an unknown state. + GENERIC (1): + The workload profile is generic. + HANA (2): + The workload profile is hana. + """ + WORKLOAD_PROFILE_UNSPECIFIED = 0 + GENERIC = 1 + HANA = 2 + class SnapshotReservationDetail(proto.Message): r"""Details about snapshot space reservation and usage on the storage volume. 
@@ -201,6 +277,10 @@ class SnapshotReservationDetail(proto.Message): proto.INT64, number=4, ) + originally_requested_size_gib: int = proto.Field( + proto.INT64, + number=16, + ) current_size_gib: int = proto.Field( proto.INT64, number=5, @@ -209,6 +289,10 @@ class SnapshotReservationDetail(proto.Message): proto.INT64, number=14, ) + max_size_gib: int = proto.Field( + proto.INT64, + number=17, + ) auto_grown_size_gib: int = proto.Field( proto.INT64, number=6, @@ -240,6 +324,42 @@ class SnapshotReservationDetail(proto.Message): proto.STRING, number=15, ) + protocol: Protocol = proto.Field( + proto.ENUM, + number=18, + enum=Protocol, + ) + boot_volume: bool = proto.Field( + proto.BOOL, + number=19, + ) + performance_tier: common.VolumePerformanceTier = proto.Field( + proto.ENUM, + number=20, + enum=common.VolumePerformanceTier, + ) + notes: str = proto.Field( + proto.STRING, + number=21, + ) + workload_profile: WorkloadProfile = proto.Field( + proto.ENUM, + number=22, + enum=WorkloadProfile, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=25, + ) + attached: bool = proto.Field( + proto.BOOL, + number=26, + ) class GetVolumeRequest(proto.Message): @@ -335,11 +455,10 @@ class UpdateVolumeRequest(proto.Message): Format: projects/{project}/locations/{location}/volumes/{volume} update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. The only currently supported - fields are: ``snapshot_auto_delete_behavior`` - ``snapshot_schedule_policy_name`` 'labels' - 'snapshot_enabled' - 'snapshot_reservation_detail.reserved_space_percent' + The list of fields to update. 
+ The only currently supported fields are: + + 'labels' """ volume: "Volume" = proto.Field( @@ -354,6 +473,42 @@ class UpdateVolumeRequest(proto.Message): ) +class RenameVolumeRequest(proto.Message): + r"""Message requesting rename of a server. + + Attributes: + name (str): + Required. The ``name`` field is used to identify the volume. + Format: + projects/{project}/locations/{location}/volumes/{volume} + new_volume_id (str): + Required. The new ``id`` of the volume. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_volume_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class EvictVolumeRequest(proto.Message): + r"""Request for skip volume cooloff and delete it. + + Attributes: + name (str): + Required. The name of the Volume. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class ResizeVolumeRequest(proto.Message): r"""Request for emergency resize Volume. diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume_snapshot.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume_snapshot.py new file mode 100644 index 000000000000..38ad30101efd --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/types/volume_snapshot.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.baremetalsolution.v2", + manifest={ + "VolumeSnapshot", + "GetVolumeSnapshotRequest", + "ListVolumeSnapshotsRequest", + "ListVolumeSnapshotsResponse", + "DeleteVolumeSnapshotRequest", + "CreateVolumeSnapshotRequest", + "RestoreVolumeSnapshotRequest", + }, +) + + +class VolumeSnapshot(proto.Message): + r"""A snapshot of a volume. Only boot volumes can have snapshots. + + Attributes: + name (str): + The name of the snapshot. + id (str): + Output only. An identifier for the snapshot, + generated by the backend. + description (str): + The description of the snapshot. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the + snapshot. + storage_volume (str): + Output only. The name of the volume which + this snapshot belongs to. + type_ (google.cloud.bare_metal_solution_v2.types.VolumeSnapshot.SnapshotType): + Output only. The type of the snapshot which + indicates whether it was scheduled or + manual/ad-hoc. + """ + + class SnapshotType(proto.Enum): + r"""Represents the type of a snapshot. + + Values: + SNAPSHOT_TYPE_UNSPECIFIED (0): + Type is not specified. + AD_HOC (1): + Snapshot was taken manually by user. + SCHEDULED (2): + Snapshot was taken automatically as a part of + a snapshot schedule. 
+ """ + SNAPSHOT_TYPE_UNSPECIFIED = 0 + AD_HOC = 1 + SCHEDULED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + storage_volume: str = proto.Field( + proto.STRING, + number=5, + ) + type_: SnapshotType = proto.Field( + proto.ENUM, + number=7, + enum=SnapshotType, + ) + + +class GetVolumeSnapshotRequest(proto.Message): + r"""Message for requesting volume snapshot information. + + Attributes: + name (str): + Required. The name of the snapshot. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListVolumeSnapshotsRequest(proto.Message): + r"""Message for requesting a list of volume snapshots. + + Attributes: + parent (str): + Required. Parent value for + ListVolumesRequest. + page_size (int): + Requested page size. The server might return + fewer items than requested. If unspecified, + server will pick an appropriate default. + page_token (str): + A token identifying a page of results from + the server. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListVolumeSnapshotsResponse(proto.Message): + r"""Response message containing the list of volume snapshots. + + Attributes: + volume_snapshots (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeSnapshot]): + The list of snapshots. + next_page_token (str): + A token identifying a page of results from + the server. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + volume_snapshots: MutableSequence["VolumeSnapshot"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VolumeSnapshot", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeleteVolumeSnapshotRequest(proto.Message): + r"""Message for deleting named Volume snapshot. + + Attributes: + name (str): + Required. The name of the snapshot to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateVolumeSnapshotRequest(proto.Message): + r"""Message for creating a volume snapshot. + + Attributes: + parent (str): + Required. The volume to snapshot. + volume_snapshot (google.cloud.bare_metal_solution_v2.types.VolumeSnapshot): + Required. The snapshot to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + volume_snapshot: "VolumeSnapshot" = proto.Field( + proto.MESSAGE, + number=2, + message="VolumeSnapshot", + ) + + +class RestoreVolumeSnapshotRequest(proto.Message): + r"""Message for restoring a volume snapshot. + + Attributes: + volume_snapshot (str): + Required. Name of the snapshot which will be + used to restore its parent volume. + """ + + volume_snapshot: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bare-metal-solution/noxfile.py b/packages/google-cloud-bare-metal-solution/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-bare-metal-solution/noxfile.py +++ b/packages/google-cloud-bare-metal-solution/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py new file mode 100644 index 000000000000..0ef658c522e7 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateNfsShare +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_create_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateNfsShareRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_nfs_share(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py new file mode 100644 index 000000000000..d6697c8709f5 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateNfsShare +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_create_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateNfsShareRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_nfs_share(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py new file mode 100644 index 000000000000..d573d226b52c --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_create_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateProvisioningConfigRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py new file mode 100644 index 000000000000..3ded9b10e21d --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_create_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateProvisioningConfigRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py new file mode 100644 index 000000000000..727f58df4815 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSSHKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_create_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateSSHKeyRequest( + parent="parent_value", + ssh_key_id="ssh_key_id_value", + ) + + # Make the request + response = await client.create_ssh_key(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py new file mode 100644 index 000000000000..1e0525822a42 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateSSHKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_create_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateSSHKeyRequest( + parent="parent_value", + ssh_key_id="ssh_key_id_value", + ) + + # Make the request + response = client.create_ssh_key(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py new file mode 100644 index 000000000000..f33d02db7e68 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_create_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_volume_snapshot(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py new file mode 100644 index 000000000000..f666fdb1839e --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_create_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_volume_snapshot(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py new file mode 100644 index 000000000000..18de684aa095 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteNfsShare +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_delete_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteNfsShareRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_nfs_share(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py new file mode 100644 index 000000000000..d000605ab2df --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteNfsShare +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_delete_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteNfsShareRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_nfs_share(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py new file mode 100644 index 000000000000..e5eaaa9d17c5 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSSHKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_delete_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteSSHKeyRequest( + name="name_value", + ) + + # Make the request + await client.delete_ssh_key(request=request) + + +# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py new file mode 100644 index 000000000000..f5dd0a279071 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSSHKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_delete_ssh_key(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteSSHKeyRequest( + name="name_value", + ) + + # Make the request + client.delete_ssh_key(request=request) + + +# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py new file mode 100644 index 000000000000..cf848638de66 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_delete_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + await client.delete_volume_snapshot(request=request) + + +# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py new file mode 100644 index 000000000000..9309a7f4a90b --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_delete_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + client.delete_volume_snapshot(request=request) + + +# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py new file mode 100644 index 000000000000..816b2d5e0dd9 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableInteractiveSerialConsole +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_disable_interactive_serial_console(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( + name="name_value", + ) + + # Make the request + operation = client.disable_interactive_serial_console(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py new file mode 100644 index 000000000000..ac3c284ba521 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DisableInteractiveSerialConsole +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_disable_interactive_serial_console(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( + name="name_value", + ) + + # Make the request + operation = client.disable_interactive_serial_console(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py new file mode 100644 index 
000000000000..675768a5b260 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableInteractiveSerialConsole +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_enable_interactive_serial_console(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( + name="name_value", + ) + + # Make the request + operation = client.enable_interactive_serial_console(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py new file mode 100644 index 000000000000..880c3efe964e --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EnableInteractiveSerialConsole +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_enable_interactive_serial_console(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( + name="name_value", + ) + + # Make the request + operation = client.enable_interactive_serial_console(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py new file mode 100644 index 000000000000..00020e974b66 --- /dev/null +++ 
b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EvictLun +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_EvictLun_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_evict_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictLunRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_lun(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_EvictLun_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py new file mode 100644 index 000000000000..40dcd4a3518b --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for EvictLun +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_EvictLun_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_evict_lun(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictLunRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_lun(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_EvictLun_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py new file mode 100644 index 000000000000..bb76c3f3c3fd --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EvictVolume +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_evict_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictVolumeRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_volume(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py new file mode 100644 index 000000000000..d3d5260bb2e2 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for EvictVolume +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_evict_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.EvictVolumeRequest( + name="name_value", + ) + + # Make the request + operation = client.evict_volume(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py new file mode 100644 index 000000000000..47303c283caa --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_get_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetProvisioningConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py new file mode 100644 index 000000000000..c7eff0e0acd9 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_get_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetProvisioningConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py new file mode 100644 index 000000000000..f93b7d16ec5f --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_get_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_volume_snapshot(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py new file mode 100644 index 000000000000..b75fb53f66c4 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_get_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.GetVolumeSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_volume_snapshot(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py new file mode 100644 index 000000000000..ba7cd602e3ce --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOSImages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_list_os_images(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListOSImagesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_images(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py new file mode 100644 index 000000000000..386035d930cf --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListOSImages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_list_os_images(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListOSImagesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_images(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py new file mode 100644 index 000000000000..cceb7aaf642a --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProvisioningQuotas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_list_provisioning_quotas(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListProvisioningQuotasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_provisioning_quotas(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py new file mode 100644 index 000000000000..17d688b99d9c --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListProvisioningQuotas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_list_provisioning_quotas(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListProvisioningQuotasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_provisioning_quotas(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py new file mode 100644 index 000000000000..754c68e7e310 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSSHKeys +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_list_ssh_keys(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListSSHKeysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ssh_keys(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py new file mode 100644 index 000000000000..8af99511cc0b --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSSHKeys +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_list_ssh_keys(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListSSHKeysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ssh_keys(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py new file mode 100644 index 000000000000..61e44b52adf4 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVolumeSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_list_volume_snapshots(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListVolumeSnapshotsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_volume_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py new file mode 100644 index 000000000000..32a72654ac41 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListVolumeSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_list_volume_snapshots(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.ListVolumeSnapshotsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_volume_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py new file mode 100644 index 000000000000..e25c7ba9f738 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_rename_instance(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameInstanceRequest( + name="name_value", + new_instance_id="new_instance_id_value", + ) + + # Make the request + response = await client.rename_instance(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py new file mode 100644 index 000000000000..f9310aa919f4 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RenameInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_rename_instance(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameInstanceRequest( + name="name_value", + new_instance_id="new_instance_id_value", + ) + + # Make the request + response = client.rename_instance(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py new file mode 100644 index 000000000000..3bc840c0fc7f --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameNetwork +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_rename_network(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameNetworkRequest( + name="name_value", + new_network_id="new_network_id_value", + ) + + # Make the request + response = await client.rename_network(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py new file mode 100644 index 000000000000..ddb9a99160b2 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RenameNetwork +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_rename_network(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameNetworkRequest( + name="name_value", + new_network_id="new_network_id_value", + ) + + # Make the request + response = client.rename_network(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py new file mode 100644 index 000000000000..c813be35070a --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameNfsShare +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_rename_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameNfsShareRequest( + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", + ) + + # Make the request + response = await client.rename_nfs_share(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py new file mode 100644 index 000000000000..c10edab1a14a --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RenameNfsShare +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_rename_nfs_share(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameNfsShareRequest( + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", + ) + + # Make the request + response = client.rename_nfs_share(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py new file mode 100644 index 000000000000..ac36d9fe07a2 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameVolume +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_rename_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameVolumeRequest( + name="name_value", + new_volume_id="new_volume_id_value", + ) + + # Make the request + response = await client.rename_volume(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py new file mode 100644 index 000000000000..71bbf6e145be --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameVolume +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_rename_volume(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RenameVolumeRequest( + name="name_value", + new_volume_id="new_volume_id_value", + ) + + # Make the request + response = client.rename_volume(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py new file mode 100644 index 000000000000..27c6e9d84a87 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_restore_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( + volume_snapshot="volume_snapshot_value", + ) + + # Make the request + operation = client.restore_volume_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py new file mode 100644 index 000000000000..9f5a1128b8d3 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreVolumeSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_restore_volume_snapshot(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( + volume_snapshot="volume_snapshot_value", + ) + + # Make the request + operation = client.restore_volume_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py new file mode 100644 index 000000000000..d24c22e988bf --- /dev/null +++ 
b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubmitProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_submit_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.SubmitProvisioningConfigRequest( + parent="parent_value", + ) + + # Make the request + response = await client.submit_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py new file mode 100644 index 000000000000..abb4170b35f2 --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SubmitProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_submit_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.SubmitProvisioningConfigRequest( + parent="parent_value", + ) + + # Make the request + response = client.submit_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py new file mode 100644 index 000000000000..2b2dc360ca3b --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +async def sample_update_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( + ) + + # Make the request + response = await client.update_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_async] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py new file mode 100644 index 000000000000..cd94acc7709b --- /dev/null +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateProvisioningConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bare-metal-solution + + +# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bare_metal_solution_v2 + + +def sample_update_provisioning_config(): + # Create a client + client = bare_metal_solution_v2.BareMetalSolutionClient() + + # Initialize request argument(s) + request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( + ) + + # Make the request + response = client.update_provisioning_config(request=request) + + # Handle the response + print(response) + +# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_sync] diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json index 99c48f953bdd..392dd697d5f5 100644 --- a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bare-metal-solution", - "version": "0.1.0" + "version": 
"1.5.0" }, "snippets": [ { @@ -19,27 +19,27 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.detach_lun", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_nfs_share", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DetachLun", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateNfsShare", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "DetachLun" + "shortName": "CreateNfsShare" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DetachLunRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest" }, { - "name": "instance", + "name": "parent", "type": "str" }, { - "name": "lun", - "type": "str" + "name": "nfs_share", + "type": "google.cloud.bare_metal_solution_v2.types.NfsShare" }, { "name": "retry", @@ -55,21 +55,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "detach_lun" + "shortName": "create_nfs_share" }, - "description": "Sample for DetachLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py", + "description": "Sample for CreateNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DetachLun_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_async", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -79,22 +79,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py" }, { "canonical": true, @@ -103,27 +103,27 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.detach_lun", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_nfs_share", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DetachLun", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateNfsShare", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "DetachLun" + "shortName": "CreateNfsShare" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DetachLunRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest" }, { - "name": "instance", + "name": "parent", "type": "str" }, { - "name": "lun", - "type": "str" + "name": "nfs_share", + "type": "google.cloud.bare_metal_solution_v2.types.NfsShare" }, { "name": "retry", @@ -139,21 +139,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "detach_lun" + "shortName": "create_nfs_share" }, - "description": "Sample for DetachLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py", + "description": "Sample for CreateNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"baremetalsolution_v2_generated_BareMetalSolution_DetachLun_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_sync", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -163,22 +163,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py" }, { "canonical": true, @@ -188,24 +188,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_provisioning_config", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateProvisioningConfig", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetInstance" + "shortName": "CreateProvisioningConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "provisioning_config", + "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -219,14 +223,14 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", - "shortName": "get_instance" + "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", + "shortName": "create_provisioning_config" }, - "description": "Sample for GetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py", + "description": "Sample for CreateProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetInstance_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_async", "segments": [ { "end": 51, @@ -259,7 +263,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py" }, { "canonical": true, @@ -268,24 +272,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_provisioning_config", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateProvisioningConfig", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetInstance" + "shortName": "CreateProvisioningConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest" }, { - "name": "name", + 
"name": "parent", "type": "str" }, + { + "name": "provisioning_config", + "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -299,14 +307,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", - "shortName": "get_instance" + "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", + "shortName": "create_provisioning_config" }, - "description": "Sample for GetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py", + "description": "Sample for CreateProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetInstance_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_sync", "segments": [ { "end": 51, @@ -339,7 +347,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py" }, { "canonical": true, @@ -349,22 +357,30 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_lun", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_ssh_key", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetLun", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateSSHKey", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetLun" + "shortName": "CreateSSHKey" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetLunRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "ssh_key", + "type": "google.cloud.bare_metal_solution_v2.types.SSHKey" + }, + { + "name": "ssh_key_id", "type": "str" }, { @@ -380,22 +396,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Lun", - "shortName": "get_lun" + "resultType": "google.cloud.bare_metal_solution_v2.types.SSHKey", + "shortName": "create_ssh_key" }, - "description": "Sample for GetLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py", + "description": "Sample for CreateSSHKey", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetLun_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -405,22 +421,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py" }, { "canonical": true, @@ -429,22 +445,30 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_lun", + "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_ssh_key", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetLun", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateSSHKey", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetLun" + "shortName": "CreateSSHKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetLunRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "ssh_key", + "type": "google.cloud.bare_metal_solution_v2.types.SSHKey" + }, + { + "name": "ssh_key_id", "type": "str" }, { @@ -460,22 +484,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Lun", - "shortName": "get_lun" + "resultType": "google.cloud.bare_metal_solution_v2.types.SSHKey", + "shortName": "create_ssh_key" }, - "description": "Sample for GetLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py", + "description": "Sample for CreateSSHKey", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetLun_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -485,22 +509,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": 
"baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py" }, { "canonical": true, @@ -510,24 +534,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_network", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_volume_snapshot", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNetwork", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateVolumeSnapshot", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetNetwork" + "shortName": "CreateVolumeSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNetworkRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "volume_snapshot", + "type": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -541,14 +569,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Network", - "shortName": "get_network" + "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", + "shortName": "create_volume_snapshot" }, - "description": "Sample for GetNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py", + "description": "Sample for CreateVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_async", + 
"regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_async", "segments": [ { "end": 51, @@ -581,7 +609,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py" }, { "canonical": true, @@ -590,24 +618,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_network", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_volume_snapshot", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNetwork", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateVolumeSnapshot", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetNetwork" + "shortName": "CreateVolumeSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNetworkRequest" + "type": "google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "volume_snapshot", + "type": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -621,14 +653,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Network", - "shortName": "get_network" + "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", + "shortName": "create_volume_snapshot" }, - "description": "Sample for GetNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py", + "description": "Sample for CreateVolumeSnapshot", + "file": 
"baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_sync", "segments": [ { "end": 51, @@ -661,7 +693,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py" }, { "canonical": true, @@ -671,19 +703,19 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_nfs_share", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.delete_nfs_share", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNfsShare", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteNfsShare", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetNfsShare" + "shortName": "DeleteNfsShare" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest" }, { "name": "name", @@ -702,22 +734,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", - "shortName": "get_nfs_share" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_nfs_share" }, - "description": "Sample for GetNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py", + "description": "Sample for DeleteNfsShare", + "file": 
"baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -732,17 +764,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py" }, { "canonical": true, @@ -751,19 +783,19 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_nfs_share", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.delete_nfs_share", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNfsShare", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteNfsShare", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetNfsShare" + "shortName": "DeleteNfsShare" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest" }, { "name": "name", @@ -782,22 +814,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", - "shortName": "get_nfs_share" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_nfs_share" 
}, - "description": "Sample for GetNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py", + "description": "Sample for DeleteNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -812,17 +844,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py" }, { "canonical": true, @@ -832,19 +864,19 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_volume", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.delete_ssh_key", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolume", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteSSHKey", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetVolume" + "shortName": "DeleteSSHKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest" }, { "name": "name", @@ -863,22 +895,21 @@ "type": "Sequence[Tuple[str, str]" } ], - 
"resultType": "google.cloud.bare_metal_solution_v2.types.Volume", - "shortName": "get_volume" + "shortName": "delete_ssh_key" }, - "description": "Sample for GetVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py", + "description": "Sample for DeleteSSHKey", + "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolume_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -893,17 +924,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py" }, { "canonical": true, @@ -912,19 +941,19 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_volume", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.delete_ssh_key", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolume", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteSSHKey", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "GetVolume" + "shortName": "DeleteSSHKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest" }, { "name": "name", 
@@ -943,22 +972,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", - "shortName": "get_volume" + "shortName": "delete_ssh_key" }, - "description": "Sample for GetVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py", + "description": "Sample for DeleteSSHKey", + "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolume_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -973,17 +1001,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py" }, { "canonical": true, @@ -993,22 +1019,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_instances", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.delete_volume_snapshot", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListInstances", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteVolumeSnapshot", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListInstances" + "shortName": "DeleteVolumeSnapshot" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.bare_metal_solution_v2.types.ListInstancesRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1024,22 +1050,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesAsyncPager", - "shortName": "list_instances" + "shortName": "delete_volume_snapshot" }, - "description": "Sample for ListInstances", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py", + "description": "Sample for DeleteVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListInstances_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1054,17 +1079,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py" }, { "canonical": true, @@ -1073,22 +1096,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_instances", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.delete_volume_snapshot", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListInstances", + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteVolumeSnapshot", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListInstances" + "shortName": "DeleteVolumeSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListInstancesRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1104,22 +1127,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesPager", - "shortName": "list_instances" + "shortName": "delete_volume_snapshot" }, - "description": "Sample for ListInstances", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py", + "description": "Sample for DeleteVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListInstances_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1134,17 +1156,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py" }, { "canonical": true, @@ -1154,22 +1174,26 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_luns", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.detach_lun", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListLuns", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DetachLun", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListLuns" + "shortName": "DetachLun" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListLunsRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DetachLunRequest" }, { - "name": "parent", + "name": "instance", + "type": "str" + }, + { + "name": "lun", "type": "str" }, { @@ -1185,22 +1209,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsAsyncPager", - "shortName": "list_luns" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "detach_lun" }, - "description": "Sample for ListLuns", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py", + "description": "Sample for DetachLun", + "file": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListLuns_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DetachLun_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -1210,22 +1234,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": 
"RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py" }, { "canonical": true, @@ -1234,22 +1258,26 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_luns", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.detach_lun", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListLuns", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DetachLun", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListLuns" + "shortName": "DetachLun" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListLunsRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DetachLunRequest" }, { - "name": "parent", + "name": "instance", + "type": "str" + }, + { + "name": "lun", "type": "str" }, { @@ -1265,22 +1293,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsPager", - "shortName": "list_luns" + "resultType": "google.api_core.operation.Operation", + "shortName": "detach_lun" }, - "description": "Sample for ListLuns", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py", + "description": "Sample for DetachLun", + "file": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListLuns_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DetachLun_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 
56, "start": 27, "type": "SHORT" }, @@ -1290,22 +1318,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py" }, { "canonical": true, @@ -1315,22 +1343,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_network_usage", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.disable_interactive_serial_console", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworkUsage", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DisableInteractiveSerialConsole", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListNetworkUsage" + "shortName": "DisableInteractiveSerialConsole" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest" + "type": "google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest" }, { - "name": "location", + "name": "name", "type": "str" }, { @@ -1346,22 +1374,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse", - "shortName": "list_network_usage" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "disable_interactive_serial_console" }, - "description": "Sample for ListNetworkUsage", - "file": 
"baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py", + "description": "Sample for DisableInteractiveSerialConsole", + "file": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1376,17 +1404,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py" }, { "canonical": true, @@ -1395,22 +1423,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_network_usage", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.disable_interactive_serial_console", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworkUsage", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DisableInteractiveSerialConsole", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListNetworkUsage" + "shortName": "DisableInteractiveSerialConsole" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest" + "type": 
"google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest" }, { - "name": "location", + "name": "name", "type": "str" }, { @@ -1426,22 +1454,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse", - "shortName": "list_network_usage" + "resultType": "google.api_core.operation.Operation", + "shortName": "disable_interactive_serial_console" }, - "description": "Sample for ListNetworkUsage", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py", + "description": "Sample for DisableInteractiveSerialConsole", + "file": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1456,17 +1484,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py" }, { "canonical": true, @@ -1476,22 +1504,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_networks", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.enable_interactive_serial_console", "method": { - "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworks", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EnableInteractiveSerialConsole", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListNetworks" + "shortName": "EnableInteractiveSerialConsole" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworksRequest" + "type": "google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1507,22 +1535,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksAsyncPager", - "shortName": "list_networks" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "enable_interactive_serial_console" }, - "description": "Sample for ListNetworks", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py", + "description": "Sample for EnableInteractiveSerialConsole", + "file": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1537,17 +1565,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py" + "title": 
"baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py" }, { "canonical": true, @@ -1556,22 +1584,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_networks", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.enable_interactive_serial_console", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworks", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EnableInteractiveSerialConsole", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListNetworks" + "shortName": "EnableInteractiveSerialConsole" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworksRequest" + "type": "google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1587,22 +1615,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksPager", - "shortName": "list_networks" + "resultType": "google.api_core.operation.Operation", + "shortName": "enable_interactive_serial_console" }, - "description": "Sample for ListNetworks", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py", + "description": "Sample for EnableInteractiveSerialConsole", + "file": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_sync", "segments": [ { - "end": 52, + 
"end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1617,17 +1645,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py" }, { "canonical": true, @@ -1637,22 +1665,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_nfs_shares", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.evict_lun", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNfsShares", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictLun", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListNfsShares" + "shortName": "EvictLun" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest" + "type": "google.cloud.bare_metal_solution_v2.types.EvictLunRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1668,22 +1696,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesAsyncPager", - "shortName": "list_nfs_shares" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "evict_lun" }, - "description": "Sample for ListNfsShares", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py", + "description": "Sample for EvictLun", + "file": 
"baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictLun_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1698,17 +1726,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py" }, { "canonical": true, @@ -1717,22 +1745,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_nfs_shares", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.evict_lun", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNfsShares", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictLun", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListNfsShares" + "shortName": "EvictLun" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest" + "type": "google.cloud.bare_metal_solution_v2.types.EvictLunRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1748,22 +1776,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesPager", - "shortName": "list_nfs_shares" + "resultType": 
"google.api_core.operation.Operation", + "shortName": "evict_lun" }, - "description": "Sample for ListNfsShares", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py", + "description": "Sample for EvictLun", + "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictLun_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1778,17 +1806,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py" }, { "canonical": true, @@ -1798,22 +1826,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_volumes", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.evict_volume", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumes", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictVolume", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListVolumes" + "shortName": "EvictVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListVolumesRequest" + "type": "google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest" }, { - "name": "parent", + 
"name": "name", "type": "str" }, { @@ -1829,22 +1857,263 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesAsyncPager", - "shortName": "list_volumes" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "evict_volume" }, - "description": "Sample for ListVolumes", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py", + "description": "Sample for EvictVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.evict_volume", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictVolume", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "EvictVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest" + }, + { + "name": "name", 
+ "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "evict_volume" + }, + "description": "Sample for EvictVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetInstance", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bare_metal_solution_v2.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_instance", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetInstance", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, "start": 27, "type": "SHORT" }, @@ -1864,12 +2133,3345 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_lun", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetLun", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetLun" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetLunRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Lun", + "shortName": "get_lun" + }, + "description": "Sample for GetLun", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetLun_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 
46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_lun", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetLun", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetLun" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetLunRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Lun", + "shortName": "get_lun" + }, + "description": "Sample for GetLun", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetLun_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_network", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNetwork", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Network", + "shortName": "get_network" + }, + "description": "Sample for GetNetwork", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_network", + "method": { + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.GetNetwork", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Network", + "shortName": "get_network" + }, + "description": "Sample for GetNetwork", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_nfs_share", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNfsShare", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetNfsShare" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", + "shortName": "get_nfs_share" + }, + "description": "Sample for GetNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_nfs_share", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNfsShare", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetNfsShare" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", + "shortName": "get_nfs_share" + }, + "description": "Sample for GetNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_provisioning_config", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetProvisioningConfig", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetProvisioningConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", + "shortName": "get_provisioning_config" + }, + "description": "Sample for GetProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_provisioning_config", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetProvisioningConfig", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetProvisioningConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", + "shortName": "get_provisioning_config" + }, + "description": "Sample for 
GetProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_volume_snapshot", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolumeSnapshot", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetVolumeSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", + "shortName": "get_volume_snapshot" + }, + "description": "Sample for GetVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_volume_snapshot", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolumeSnapshot", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetVolumeSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", + "shortName": "get_volume_snapshot" + }, + "description": "Sample for GetVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_volume", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolume", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", + "shortName": "get_volume" + }, + "description": "Sample for GetVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolume_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_volume", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolume", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "GetVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", + "shortName": "get_volume" + }, + "description": "Sample for GetVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolume_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListInstances", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_instances", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListInstances", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_luns", + "method": { + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.ListLuns", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListLuns" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListLunsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsAsyncPager", + "shortName": "list_luns" + }, + "description": "Sample for ListLuns", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListLuns_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_luns", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListLuns", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListLuns" + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListLunsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsPager", + "shortName": "list_luns" + }, + "description": "Sample for ListLuns", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListLuns_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_network_usage", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworkUsage", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListNetworkUsage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest" + }, + { + "name": "location", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse", + "shortName": "list_network_usage" + }, + "description": "Sample for ListNetworkUsage", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_network_usage", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworkUsage", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListNetworkUsage" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest" + }, + { + "name": "location", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse", + "shortName": "list_network_usage" + }, + "description": "Sample for ListNetworkUsage", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_networks", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworks", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListNetworks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListNetworksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksAsyncPager", + "shortName": "list_networks" + }, + "description": "Sample for 
ListNetworks", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_networks", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworks", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListNetworks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListNetworksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksPager", + "shortName": "list_networks" + }, + "description": "Sample for ListNetworks", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_nfs_shares", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNfsShares", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListNfsShares" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesAsyncPager", + "shortName": "list_nfs_shares" + }, + "description": "Sample for ListNfsShares", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 
52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_nfs_shares", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNfsShares", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListNfsShares" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesPager", + "shortName": "list_nfs_shares" + }, + "description": "Sample for ListNfsShares", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_os_images", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListOSImages", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListOSImages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesAsyncPager", + "shortName": "list_os_images" + }, + "description": "Sample for ListOSImages", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_os_images", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListOSImages", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListOSImages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesPager", + "shortName": "list_os_images" + }, + "description": "Sample for ListOSImages", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_provisioning_quotas", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListProvisioningQuotas", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListProvisioningQuotas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasAsyncPager", + "shortName": "list_provisioning_quotas" + }, + "description": "Sample for ListProvisioningQuotas", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": 
"BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_provisioning_quotas", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListProvisioningQuotas", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListProvisioningQuotas" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasPager", + "shortName": "list_provisioning_quotas" + }, + "description": "Sample for ListProvisioningQuotas", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_ssh_keys", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListSSHKeys", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListSSHKeys" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysAsyncPager", + "shortName": "list_ssh_keys" + }, + "description": "Sample for ListSSHKeys", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_ssh_keys", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListSSHKeys", + "service": { + 
"fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListSSHKeys" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysPager", + "shortName": "list_ssh_keys" + }, + "description": "Sample for ListSSHKeys", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_volume_snapshots", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumeSnapshots", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListVolumeSnapshots" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsAsyncPager", + "shortName": "list_volume_snapshots" + }, + "description": "Sample for ListVolumeSnapshots", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_volume_snapshots", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumeSnapshots", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListVolumeSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest" + 
}, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsPager", + "shortName": "list_volume_snapshots" + }, + "description": "Sample for ListVolumeSnapshots", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_volumes", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumes", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListVolumes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListVolumesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesAsyncPager", + "shortName": "list_volumes" + }, + "description": "Sample for ListVolumes", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_volumes", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumes", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ListVolumes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ListVolumesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesPager", + "shortName": "list_volumes" + }, + "description": "Sample for ListVolumes", + "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_instance", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameInstance", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", + "shortName": "rename_instance" + }, + "description": "Sample 
for RenameInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_instance", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameInstance", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", + "shortName": "rename_instance" + }, + "description": "Sample for RenameInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_network", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNetwork", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_network_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Network", + "shortName": "rename_network" + }, + "description": "Sample for RenameNetwork", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_network", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNetwork", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_network_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Network", + "shortName": "rename_network" + }, + "description": "Sample for RenameNetwork", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_nfs_share", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNfsShare", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameNfsShare" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_nfsshare_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", + "shortName": "rename_nfs_share" + }, + "description": "Sample for RenameNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_nfs_share", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNfsShare", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameNfsShare" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_nfsshare_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", + "shortName": "rename_nfs_share" + }, + "description": "Sample for RenameNfsShare", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_volume", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameVolume", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_volume_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", + "shortName": "rename_volume" + }, + "description": "Sample for RenameVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_volume", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameVolume", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RenameVolume" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_volume_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", + "shortName": "rename_volume" + }, + "description": "Sample for RenameVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.reset_instance", + "method": { + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.ResetInstance", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "ResetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reset_instance" + }, + "description": "Sample for ResetInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py" }, { "canonical": true, @@ -1878,22 +5480,22 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_volumes", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.reset_instance", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumes", + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.ResetInstance", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ListVolumes" + "shortName": "ResetInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListVolumesRequest" + "type": "google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1909,22 +5511,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesPager", - "shortName": "list_volumes" + "resultType": "google.api_core.operation.Operation", + "shortName": "reset_instance" }, - "description": "Sample for ListVolumes", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py", + "description": "Sample for ResetInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1939,17 +5541,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py" }, { "canonical": true, @@ -1959,24 +5561,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.reset_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.resize_volume", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResetInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResizeVolume", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ResetInstance" + "shortName": "ResizeVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest" }, { - "name": "name", + "name": "volume", "type": "str" }, + { + "name": "size_gib", + "type": "int" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1991,13 +5597,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reset_instance" + "shortName": "resize_volume" }, - "description": "Sample for ResetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py", + "description": "Sample for ResizeVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_async", "segments": [ { "end": 55, @@ -2030,7 +5636,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py" }, { "canonical": true, @@ -2039,24 +5645,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.reset_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.resize_volume", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResetInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResizeVolume", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ResetInstance" + "shortName": "ResizeVolume" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest" }, { - "name": "name", + "name": "volume", "type": "str" }, + { + "name": "size_gib", + "type": "int" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2071,13 +5681,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "reset_instance" + "shortName": "resize_volume" }, - "description": "Sample for ResetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py", + "description": "Sample for ResizeVolume", + "file": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_sync", "segments": [ { "end": 55, @@ -2110,7 +5720,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py" }, { "canonical": true, @@ -2120,27 +5730,184 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.resize_volume", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.restore_volume_snapshot", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResizeVolume", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RestoreVolumeSnapshot", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ResizeVolume" + "shortName": "RestoreVolumeSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest" + "type": "google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest" }, { - "name": "volume", + "name": "volume_snapshot", "type": "str" }, { - "name": "size_gib", - "type": "int" + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_volume_snapshot" + }, + "description": "Sample for RestoreVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.restore_volume_snapshot", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RestoreVolumeSnapshot", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "RestoreVolumeSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest" + }, + { + "name": "volume_snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_volume_snapshot" + }, + "description": "Sample for RestoreVolumeSnapshot", + "file": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": 
"BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.start_instance", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StartInstance", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "StartInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.StartInstanceRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2156,13 +5923,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resize_volume" + "shortName": "start_instance" }, - "description": "Sample for ResizeVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py", + "description": "Sample for StartInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StartInstance_async", "segments": [ { "end": 55, @@ -2195,7 +5962,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py" }, { "canonical": true, @@ -2204,28 +5971,24 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.resize_volume", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.start_instance", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResizeVolume", + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.StartInstance", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "ResizeVolume" + "shortName": "StartInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest" + "type": "google.cloud.bare_metal_solution_v2.types.StartInstanceRequest" }, { - "name": "volume", + "name": "name", "type": "str" }, - { - "name": "size_gib", - "type": "int" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2240,13 +6003,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "resize_volume" + "shortName": "start_instance" }, - "description": "Sample for ResizeVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py", + "description": "Sample for StartInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StartInstance_sync", "segments": [ { "end": 55, @@ -2279,7 +6042,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py" }, { "canonical": true, @@ -2289,19 +6052,19 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.start_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.stop_instance", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StartInstance", + "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.StopInstance", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "StartInstance" + "shortName": "StopInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StartInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.StopInstanceRequest" }, { "name": "name", @@ -2321,13 +6084,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "start_instance" + "shortName": "stop_instance" }, - "description": "Sample for StartInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py", + "description": "Sample for StopInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StartInstance_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StopInstance_async", "segments": [ { "end": 55, @@ -2360,7 +6123,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py" }, { "canonical": true, @@ -2369,19 +6132,19 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.start_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.stop_instance", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StartInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StopInstance", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, 
- "shortName": "StartInstance" + "shortName": "StopInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StartInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.StopInstanceRequest" }, { "name": "name", @@ -2401,13 +6164,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "start_instance" + "shortName": "stop_instance" }, - "description": "Sample for StartInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py", + "description": "Sample for StopInstance", + "file": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StartInstance_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StopInstance_sync", "segments": [ { "end": 55, @@ -2440,7 +6203,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py" }, { "canonical": true, @@ -2450,24 +6213,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", "shortName": "BareMetalSolutionAsyncClient" }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.stop_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.submit_provisioning_config", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StopInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.SubmitProvisioningConfig", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "StopInstance" + "shortName": "SubmitProvisioningConfig" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.bare_metal_solution_v2.types.StopInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "provisioning_config", + "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2481,22 +6248,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "stop_instance" + "resultType": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse", + "shortName": "submit_provisioning_config" }, - "description": "Sample for StopInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py", + "description": "Sample for SubmitProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StopInstance_async", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2511,17 +6278,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py" }, { "canonical": true, @@ -2530,24 +6297,28 @@ "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", "shortName": "BareMetalSolutionClient" }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.stop_instance", + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.submit_provisioning_config", "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StopInstance", + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.SubmitProvisioningConfig", "service": { "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", "shortName": "BareMetalSolution" }, - "shortName": "StopInstance" + "shortName": "SubmitProvisioningConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StopInstanceRequest" + "type": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "provisioning_config", + "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2561,22 +6332,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "stop_instance" + "resultType": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse", + "shortName": "submit_provisioning_config" }, - "description": "Sample for StopInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py", + "description": "Sample for SubmitProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StopInstance_sync", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2591,17 +6362,17 @@ "type": "REQUEST_INITIALIZATION" }, { - 
"end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py" + "title": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py" }, { "canonical": true, @@ -3110,6 +6881,175 @@ ], "title": "baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", + "shortName": "BareMetalSolutionAsyncClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.update_provisioning_config", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateProvisioningConfig", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "UpdateProvisioningConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest" + }, + { + "name": "provisioning_config", + "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", + "shortName": "update_provisioning_config" + }, + "description": "Sample for UpdateProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", + "shortName": "BareMetalSolutionClient" + }, + "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.update_provisioning_config", + "method": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateProvisioningConfig", + "service": { + "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", + "shortName": "BareMetalSolution" + }, + "shortName": "UpdateProvisioningConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest" + }, + { + "name": "provisioning_config", + "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", + "shortName": "update_provisioning_config" + }, + "description": "Sample for UpdateProvisioningConfig", + "file": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-bare-metal-solution/scripts/decrypt-secrets.sh b/packages/google-cloud-bare-metal-solution/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-bare-metal-solution/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-bare-metal-solution/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bare-metal-solution/scripts/fixup_bare_metal_solution_v2_keywords.py b/packages/google-cloud-bare-metal-solution/scripts/fixup_bare_metal_solution_v2_keywords.py index 58220f187590..baf65b27b19d 100644 --- a/packages/google-cloud-bare-metal-solution/scripts/fixup_bare_metal_solution_v2_keywords.py +++ b/packages/google-cloud-bare-metal-solution/scripts/fixup_bare_metal_solution_v2_keywords.py @@ -39,25 +39,49 @@ def partition( class bare_metal_solutionCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'detach_lun': ('instance', 'lun', ), + 'create_nfs_share': ('parent', 'nfs_share', ), + 'create_provisioning_config': ('parent', 'provisioning_config', 'email', ), + 'create_ssh_key': ('parent', 'ssh_key', 'ssh_key_id', ), + 'create_volume_snapshot': ('parent', 'volume_snapshot', ), + 'delete_nfs_share': ('name', ), + 'delete_ssh_key': ('name', ), + 'delete_volume_snapshot': ('name', ), + 'detach_lun': ('instance', 'lun', 'skip_reboot', ), + 'disable_interactive_serial_console': ('name', ), + 'enable_interactive_serial_console': ('name', ), + 'evict_lun': ('name', ), + 'evict_volume': ('name', ), 'get_instance': ('name', ), 'get_lun': ('name', ), 'get_network': ('name', ), 'get_nfs_share': ('name', ), + 'get_provisioning_config': ('name', ), 'get_volume': ('name', ), + 'get_volume_snapshot': ('name', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', ), 'list_luns': ('parent', 'page_size', 'page_token', ), 'list_networks': ('parent', 'page_size', 'page_token', 'filter', ), 'list_network_usage': ('location', ), 'list_nfs_shares': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_os_images': ('parent', 'page_size', 'page_token', ), + 'list_provisioning_quotas': ('parent', 'page_size', 'page_token', ), + 'list_ssh_keys': ('parent', 'page_size', 'page_token', ), 'list_volumes': ('parent', 'page_size', 'page_token', 
'filter', ), + 'list_volume_snapshots': ('parent', 'page_size', 'page_token', ), + 'rename_instance': ('name', 'new_instance_id', ), + 'rename_network': ('name', 'new_network_id', ), + 'rename_nfs_share': ('name', 'new_nfsshare_id', ), + 'rename_volume': ('name', 'new_volume_id', ), 'reset_instance': ('name', ), 'resize_volume': ('volume', 'size_gib', ), + 'restore_volume_snapshot': ('volume_snapshot', ), 'start_instance': ('name', ), 'stop_instance': ('name', ), + 'submit_provisioning_config': ('parent', 'provisioning_config', 'email', ), 'update_instance': ('instance', 'update_mask', ), 'update_network': ('network', 'update_mask', ), 'update_nfs_share': ('nfs_share', 'update_mask', ), + 'update_provisioning_config': ('provisioning_config', 'update_mask', 'email', ), 'update_volume': ('volume', 'update_mask', ), } diff --git a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py index c2527593d4dc..c82e3b2e08f4 100644 --- a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py @@ -47,6 +47,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore @@ -65,15 +66,22 @@ transports, ) from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import baremetalsolution +from google.cloud.bare_metal_solution_v2.types import ( + volume_snapshot as gcb_volume_snapshot, +) +from 
google.cloud.bare_metal_solution_v2.types import baremetalsolution, common from google.cloud.bare_metal_solution_v2.types import instance from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance from google.cloud.bare_metal_solution_v2.types import lun from google.cloud.bare_metal_solution_v2.types import network from google.cloud.bare_metal_solution_v2.types import network as gcb_network from google.cloud.bare_metal_solution_v2.types import nfs_share +from google.cloud.bare_metal_solution_v2.types import osimage, provisioning +from google.cloud.bare_metal_solution_v2.types import ssh_key +from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key from google.cloud.bare_metal_solution_v2.types import volume from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume +from google.cloud.bare_metal_solution_v2.types import volume_snapshot def client_cert_source_callback(): @@ -1213,6 +1221,9 @@ def test_get_instance(request_type, transport: str = "grpc"): os_image="os_image_value", pod="pod_value", network_template="network_template_value", + login_info="login_info_value", + workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, + firmware_version="firmware_version_value", ) response = client.get_instance(request) @@ -1232,6 +1243,9 @@ def test_get_instance(request_type, transport: str = "grpc"): assert response.os_image == "os_image_value" assert response.pod == "pod_value" assert response.network_template == "network_template_value" + assert response.login_info == "login_info_value" + assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC + assert response.firmware_version == "firmware_version_value" def test_get_instance_empty_call(): @@ -1277,6 +1291,9 @@ async def test_get_instance_async( os_image="os_image_value", pod="pod_value", network_template="network_template_value", + login_info="login_info_value", + 
workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, + firmware_version="firmware_version_value", ) ) response = await client.get_instance(request) @@ -1297,6 +1314,9 @@ async def test_get_instance_async( assert response.os_image == "os_image_value" assert response.pod == "pod_value" assert response.network_template == "network_template_value" + assert response.login_info == "login_info_value" + assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC + assert response.firmware_version == "firmware_version_value" @pytest.mark.asyncio @@ -1679,6 +1699,288 @@ async def test_update_instance_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + instance.RenameInstanceRequest, + dict, + ], +) +def test_rename_instance(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = instance.Instance( + name="name_value", + id="id_value", + machine_type="machine_type_value", + state=instance.Instance.State.PROVISIONING, + hyperthreading_enabled=True, + interactive_serial_console_enabled=True, + os_image="os_image_value", + pod="pod_value", + network_template="network_template_value", + login_info="login_info_value", + workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, + firmware_version="firmware_version_value", + ) + response = client.rename_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == instance.RenameInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, instance.Instance) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.machine_type == "machine_type_value" + assert response.state == instance.Instance.State.PROVISIONING + assert response.hyperthreading_enabled is True + assert response.interactive_serial_console_enabled is True + assert response.os_image == "os_image_value" + assert response.pod == "pod_value" + assert response.network_template == "network_template_value" + assert response.login_info == "login_info_value" + assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC + assert response.firmware_version == "firmware_version_value" + + +def test_rename_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + client.rename_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == instance.RenameInstanceRequest() + + +@pytest.mark.asyncio +async def test_rename_instance_async( + transport: str = "grpc_asyncio", request_type=instance.RenameInstanceRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance.Instance( + name="name_value", + id="id_value", + machine_type="machine_type_value", + state=instance.Instance.State.PROVISIONING, + hyperthreading_enabled=True, + interactive_serial_console_enabled=True, + os_image="os_image_value", + pod="pod_value", + network_template="network_template_value", + login_info="login_info_value", + workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, + firmware_version="firmware_version_value", + ) + ) + response = await client.rename_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == instance.RenameInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, instance.Instance) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.machine_type == "machine_type_value" + assert response.state == instance.Instance.State.PROVISIONING + assert response.hyperthreading_enabled is True + assert response.interactive_serial_console_enabled is True + assert response.os_image == "os_image_value" + assert response.pod == "pod_value" + assert response.network_template == "network_template_value" + assert response.login_info == "login_info_value" + assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC + assert response.firmware_version == "firmware_version_value" + + +@pytest.mark.asyncio +async def test_rename_instance_async_from_dict(): + await test_rename_instance_async(request_type=dict) + + +def test_rename_instance_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + 
# a field header. Set these to a non-empty value. + request = instance.RenameInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + call.return_value = instance.Instance() + client.rename_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rename_instance_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = instance.RenameInstanceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance()) + await client.rename_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rename_instance_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = instance.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_instance( + name="name_value", + new_instance_id="new_instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_instance_id + mock_val = "new_instance_id_value" + assert arg == mock_val + + +def test_rename_instance_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_instance( + instance.RenameInstanceRequest(), + name="name_value", + new_instance_id="new_instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_rename_instance_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = instance.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_instance( + name="name_value", + new_instance_id="new_instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_instance_id + mock_val = "new_instance_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rename_instance_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rename_instance( + instance.RenameInstanceRequest(), + name="name_value", + new_instance_id="new_instance_id_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2360,11 +2662,11 @@ async def test_stop_instance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - gcb_instance.DetachLunRequest, + instance.EnableInteractiveSerialConsoleRequest, dict, ], ) -def test_detach_lun(request_type, transport: str = "grpc"): +def test_enable_interactive_serial_console(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2375,21 +2677,23 @@ def test_detach_lun(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.detach_lun(request) + response = client.enable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gcb_instance.DetachLunRequest() + assert args[0] == instance.EnableInteractiveSerialConsoleRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_detach_lun_empty_call(): +def test_enable_interactive_serial_console_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -2398,16 +2702,19 @@ def test_detach_lun_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: - client.detach_lun() + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: + client.enable_interactive_serial_console() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gcb_instance.DetachLunRequest() + assert args[0] == instance.EnableInteractiveSerialConsoleRequest() @pytest.mark.asyncio -async def test_detach_lun_async( - transport: str = "grpc_asyncio", request_type=gcb_instance.DetachLunRequest +async def test_enable_interactive_serial_console_async( + transport: str = "grpc_asyncio", + request_type=instance.EnableInteractiveSerialConsoleRequest, ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2419,42 +2726,46 @@ async def test_detach_lun_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.detach_lun(request) + response = await client.enable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gcb_instance.DetachLunRequest() + assert args[0] == instance.EnableInteractiveSerialConsoleRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_detach_lun_async_from_dict(): - await test_detach_lun_async(request_type=dict) +async def test_enable_interactive_serial_console_async_from_dict(): + await test_enable_interactive_serial_console_async(request_type=dict) -def test_detach_lun_field_headers(): +def test_enable_interactive_serial_console_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_instance.DetachLunRequest() + request = instance.EnableInteractiveSerialConsoleRequest() - request.instance = "instance_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.detach_lun(request) + client.enable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -2465,28 +2776,30 @@ def test_detach_lun_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "instance=instance_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_detach_lun_field_headers_async(): +async def test_enable_interactive_serial_console_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_instance.DetachLunRequest() + request = instance.EnableInteractiveSerialConsoleRequest() - request.instance = "instance_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.detach_lun(request) + await client.enable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2497,39 +2810,37 @@ async def test_detach_lun_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "instance=instance_value", + "name=name_value", ) in kw["metadata"] -def test_detach_lun_flattened(): +def test_enable_interactive_serial_console_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.detach_lun( - instance="instance_value", - lun="lun_value", + client.enable_interactive_serial_console( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].lun - mock_val = "lun_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_detach_lun_flattened_error(): +def test_enable_interactive_serial_console_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2537,21 +2848,22 @@ def test_detach_lun_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.detach_lun( - gcb_instance.DetachLunRequest(), - instance="instance_value", - lun="lun_value", + client.enable_interactive_serial_console( + instance.EnableInteractiveSerialConsoleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_detach_lun_flattened_async(): +async def test_enable_interactive_serial_console_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + with mock.patch.object( + type(client.transport.enable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -2560,25 +2872,21 @@ async def test_detach_lun_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.detach_lun( - instance="instance_value", - lun="lun_value", + response = await client.enable_interactive_serial_console( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = "instance_value" - assert arg == mock_val - arg = args[0].lun - mock_val = "lun_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_detach_lun_flattened_error_async(): +async def test_enable_interactive_serial_console_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2586,21 +2894,20 @@ async def test_detach_lun_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.detach_lun( - gcb_instance.DetachLunRequest(), - instance="instance_value", - lun="lun_value", + await client.enable_interactive_serial_console( + instance.EnableInteractiveSerialConsoleRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - volume.ListVolumesRequest, + instance.DisableInteractiveSerialConsoleRequest, dict, ], ) -def test_list_volumes(request_type, transport: str = "grpc"): +def test_disable_interactive_serial_console(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2611,26 +2918,23 @@ def test_list_volumes(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = volume.ListVolumesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_volumes(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.disable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == volume.ListVolumesRequest() + assert args[0] == instance.DisableInteractiveSerialConsoleRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVolumesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_volumes_empty_call(): +def test_disable_interactive_serial_console_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -2639,16 +2943,19 @@ def test_list_volumes_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: - client.list_volumes() + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: + client.disable_interactive_serial_console() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == volume.ListVolumesRequest() + assert args[0] == instance.DisableInteractiveSerialConsoleRequest() @pytest.mark.asyncio -async def test_list_volumes_async( - transport: str = "grpc_asyncio", request_type=volume.ListVolumesRequest +async def test_disable_interactive_serial_console_async( + transport: str = "grpc_asyncio", + request_type=instance.DisableInteractiveSerialConsoleRequest, ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2660,47 +2967,46 @@ async def test_list_volumes_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - volume.ListVolumesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_volumes(request) + response = await client.disable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == volume.ListVolumesRequest() + assert args[0] == instance.DisableInteractiveSerialConsoleRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVolumesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_volumes_async_from_dict(): - await test_list_volumes_async(request_type=dict) +async def test_disable_interactive_serial_console_async_from_dict(): + await test_disable_interactive_serial_console_async(request_type=dict) -def test_list_volumes_field_headers(): +def test_disable_interactive_serial_console_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = volume.ListVolumesRequest() + request = instance.DisableInteractiveSerialConsoleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: - call.return_value = volume.ListVolumesResponse() - client.list_volumes(request) + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.disable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2711,28 +3017,30 @@ def test_list_volumes_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_volumes_field_headers_async(): +async def test_disable_interactive_serial_console_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = volume.ListVolumesRequest() + request = instance.DisableInteractiveSerialConsoleRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - volume.ListVolumesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_volumes(request) + await client.disable_interactive_serial_console(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2743,35 +3051,37 @@ async def test_list_volumes_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_volumes_flattened(): +def test_disable_interactive_serial_console_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = volume.ListVolumesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_volumes( - parent="parent_value", + client.disable_interactive_serial_console( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_volumes_flattened_error(): +def test_disable_interactive_serial_console_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2779,43 +3089,45 @@ def test_list_volumes_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_volumes( - volume.ListVolumesRequest(), - parent="parent_value", + client.disable_interactive_serial_console( + instance.DisableInteractiveSerialConsoleRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_volumes_flattened_async(): +async def test_disable_interactive_serial_console_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + with mock.patch.object( + type(client.transport.disable_interactive_serial_console), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = volume.ListVolumesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - volume.ListVolumesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_volumes( - parent="parent_value", + response = await client.disable_interactive_serial_console( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_volumes_flattened_error_async(): +async def test_disable_interactive_serial_console_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2823,281 +3135,63 @@ async def test_list_volumes_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_volumes( - volume.ListVolumesRequest(), - parent="parent_value", + await client.disable_interactive_serial_console( + instance.DisableInteractiveSerialConsoleRequest(), + name="name_value", ) -def test_list_volumes_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + gcb_instance.DetachLunRequest, + dict, + ], +) +def test_detach_lun(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token="abc", - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token="def", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token="ghi", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_volumes(request={}) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.detach_lun(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_instance.DetachLunRequest() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, volume.Volume) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_volumes_pages(transport_name: str = "grpc"): +def test_detach_lun_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token="abc", - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token="def", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token="ghi", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - RuntimeError, - ) - pages = list(client.list_volumes(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_volumes_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volumes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token="abc", - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token="def", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token="ghi", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_volumes( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, volume.Volume) for i in responses) - - -@pytest.mark.asyncio -async def test_list_volumes_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_volumes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token="abc", - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token="def", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token="ghi", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_volumes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - volume.GetVolumeRequest, - dict, - ], -) -def test_get_volume(request_type, transport: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_volume), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = volume.Volume( - name="name_value", - id="id_value", - storage_type=volume.Volume.StorageType.SSD, - state=volume.Volume.State.CREATING, - requested_size_gib=1917, - current_size_gib=1710, - emergency_size_gib=1898, - auto_grown_size_gib=2032, - remaining_space_gib=1974, - snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, - snapshot_enabled=True, - pod="pod_value", - ) - response = client.get_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == volume.GetVolumeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, volume.Volume) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.storage_type == volume.Volume.StorageType.SSD - assert response.state == volume.Volume.State.CREATING - assert response.requested_size_gib == 1917 - assert response.current_size_gib == 1710 - assert response.emergency_size_gib == 1898 - assert response.auto_grown_size_gib == 2032 - assert response.remaining_space_gib == 1974 - assert ( - response.snapshot_auto_delete_behavior - == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED - ) - assert response.snapshot_enabled is True - assert response.pod == "pod_value" - - -def test_get_volume_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_volume), "__call__") as call: - client.get_volume() + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + client.detach_lun() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == volume.GetVolumeRequest() + assert args[0] == gcb_instance.DetachLunRequest() @pytest.mark.asyncio -async def test_get_volume_async( - transport: str = "grpc_asyncio", request_type=volume.GetVolumeRequest +async def test_detach_lun_async( + transport: str = "grpc_asyncio", request_type=gcb_instance.DetachLunRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3109,70 +3203,42 @@ async def test_get_volume_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - volume.Volume( - name="name_value", - id="id_value", - storage_type=volume.Volume.StorageType.SSD, - state=volume.Volume.State.CREATING, - requested_size_gib=1917, - current_size_gib=1710, - emergency_size_gib=1898, - auto_grown_size_gib=2032, - remaining_space_gib=1974, - snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, - snapshot_enabled=True, - pod="pod_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_volume(request) + response = await client.detach_lun(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == volume.GetVolumeRequest() + assert args[0] == gcb_instance.DetachLunRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, volume.Volume) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.storage_type == volume.Volume.StorageType.SSD - assert response.state == volume.Volume.State.CREATING - assert response.requested_size_gib == 1917 - assert response.current_size_gib == 1710 - assert response.emergency_size_gib == 1898 - assert response.auto_grown_size_gib == 2032 - assert response.remaining_space_gib == 1974 - assert ( - response.snapshot_auto_delete_behavior - == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED - ) - assert response.snapshot_enabled is True - assert response.pod == "pod_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_volume_async_from_dict(): - await test_get_volume_async(request_type=dict) +async def test_detach_lun_async_from_dict(): + await test_detach_lun_async(request_type=dict) -def test_get_volume_field_headers(): +def test_detach_lun_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = volume.GetVolumeRequest() + request = gcb_instance.DetachLunRequest() - request.name = "name_value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_volume), "__call__") as call: - call.return_value = volume.Volume() - client.get_volume(request) + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.detach_lun(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3183,65 +3249,71 @@ def test_get_volume_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "instance=instance_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_volume_field_headers_async(): +async def test_detach_lun_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = volume.GetVolumeRequest() + request = gcb_instance.DetachLunRequest() - request.name = "name_value" + request.instance = "instance_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_volume), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume()) - await client.get_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.detach_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "instance=instance_value", ) in kw["metadata"] -def test_get_volume_flattened(): +def test_detach_lun_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = volume.Volume() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_volume( - name="name_value", + client.detach_lun( + instance="instance_value", + lun="lun_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].lun + mock_val = "lun_value" assert arg == mock_val -def test_get_volume_flattened_error(): +def test_detach_lun_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3249,41 +3321,48 @@ def test_get_volume_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_volume( - volume.GetVolumeRequest(), - name="name_value", + client.detach_lun( + gcb_instance.DetachLunRequest(), + instance="instance_value", + lun="lun_value", ) @pytest.mark.asyncio -async def test_get_volume_flattened_async(): +async def test_detach_lun_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + with mock.patch.object(type(client.transport.detach_lun), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = volume.Volume() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_volume( - name="name_value", + response = await client.detach_lun( + instance="instance_value", + lun="lun_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].instance + mock_val = "instance_value" + assert arg == mock_val + arg = args[0].lun + mock_val = "lun_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_volume_flattened_error_async(): +async def test_detach_lun_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3291,20 +3370,21 @@ async def test_get_volume_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_volume( - volume.GetVolumeRequest(), - name="name_value", + await client.detach_lun( + gcb_instance.DetachLunRequest(), + instance="instance_value", + lun="lun_value", ) @pytest.mark.parametrize( "request_type", [ - gcb_volume.UpdateVolumeRequest, + ssh_key.ListSSHKeysRequest, dict, ], ) -def test_update_volume(request_type, transport: str = "grpc"): +def test_list_ssh_keys(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3315,21 +3395,24 @@ def test_update_volume(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_volume(request) + call.return_value = ssh_key.ListSSHKeysResponse( + next_page_token="next_page_token_value", + ) + response = client.list_ssh_keys(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.UpdateVolumeRequest() + assert args[0] == ssh_key.ListSSHKeysRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListSSHKeysPager) + assert response.next_page_token == "next_page_token_value" -def test_update_volume_empty_call(): +def test_list_ssh_keys_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BareMetalSolutionClient( @@ -3338,16 +3421,16 @@ def test_update_volume_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: - client.update_volume() + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: + client.list_ssh_keys() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.UpdateVolumeRequest() + assert args[0] == ssh_key.ListSSHKeysRequest() @pytest.mark.asyncio -async def test_update_volume_async( - transport: str = "grpc_asyncio", request_type=gcb_volume.UpdateVolumeRequest +async def test_list_ssh_keys_async( + transport: str = "grpc_asyncio", request_type=ssh_key.ListSSHKeysRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3359,42 +3442,45 @@ async def test_update_volume_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + ssh_key.ListSSHKeysResponse( + next_page_token="next_page_token_value", + ) ) - response = await client.update_volume(request) + response = await client.list_ssh_keys(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.UpdateVolumeRequest() + assert args[0] == ssh_key.ListSSHKeysRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListSSHKeysAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_update_volume_async_from_dict(): - await test_update_volume_async(request_type=dict) +async def test_list_ssh_keys_async_from_dict(): + await test_list_ssh_keys_async(request_type=dict) -def test_update_volume_field_headers(): +def test_list_ssh_keys_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_volume.UpdateVolumeRequest() + request = ssh_key.ListSSHKeysRequest() - request.volume.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_volume(request) + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: + call.return_value = ssh_key.ListSSHKeysResponse() + client.list_ssh_keys(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3405,28 +3491,28 @@ def test_update_volume_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "volume.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_volume_field_headers_async(): +async def test_list_ssh_keys_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = gcb_volume.UpdateVolumeRequest() + request = ssh_key.ListSSHKeysRequest() - request.volume.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + ssh_key.ListSSHKeysResponse() ) - await client.update_volume(request) + await client.list_ssh_keys(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3437,39 +3523,35 @@ async def test_update_volume_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "volume.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_volume_flattened(): +def test_list_ssh_keys_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = ssh_key.ListSSHKeysResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_volume( - volume=gcb_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_ssh_keys( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].volume - mock_val = gcb_volume.Volume(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_volume_flattened_error(): +def test_list_ssh_keys_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3477,48 +3559,43 @@ def test_update_volume_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_volume( - gcb_volume.UpdateVolumeRequest(), - volume=gcb_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_ssh_keys( + ssh_key.ListSSHKeysRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_volume_flattened_async(): +async def test_list_ssh_keys_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_volume), "__call__") as call: + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = ssh_key.ListSSHKeysResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + ssh_key.ListSSHKeysResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_volume( - volume=gcb_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_ssh_keys( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].volume - mock_val = gcb_volume.Volume(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_volume_flattened_error_async(): +async def test_list_ssh_keys_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3526,111 +3603,310 @@ async def test_update_volume_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_volume( - gcb_volume.UpdateVolumeRequest(), - volume=gcb_volume.Volume(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.list_ssh_keys( + ssh_key.ListSSHKeysRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - gcb_volume.ResizeVolumeRequest, - dict, - ], -) -def test_resize_volume(request_type, transport: str = "grpc"): +def test_list_ssh_keys_pager(transport_name: str = "grpc"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.resize_volume(request) + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + next_page_token="abc", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[], + next_page_token="def", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ], + next_page_token="ghi", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.ResizeVolumeRequest() + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_ssh_keys(request={}) - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, ssh_key.SSHKey) for i in results) -def test_resize_volume_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_ssh_keys_pages(transport_name: str = "grpc"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: - client.resize_volume() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.ResizeVolumeRequest() + with mock.patch.object(type(client.transport.list_ssh_keys), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + next_page_token="abc", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[], + next_page_token="def", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ], + next_page_token="ghi", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + ), + RuntimeError, + ) + pages = list(client.list_ssh_keys(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_resize_volume_async( - transport: str = "grpc_asyncio", request_type=gcb_volume.ResizeVolumeRequest -): +async def test_list_ssh_keys_async_pager(): client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.ResizeVolumeRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - + with mock.patch.object( + type(client.transport.list_ssh_keys), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + next_page_token="abc", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[], + next_page_token="def", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ], + next_page_token="ghi", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_ssh_keys( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, ssh_key.SSHKey) for i in responses) + + @pytest.mark.asyncio -async def test_resize_volume_async_from_dict(): - await test_resize_volume_async(request_type=dict) +async def test_list_ssh_keys_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_ssh_keys), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + next_page_token="abc", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[], + next_page_token="def", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ], + next_page_token="ghi", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_ssh_keys(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_resize_volume_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + gcb_ssh_key.CreateSSHKeyRequest, + dict, + ], +) +def test_create_ssh_key(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_ssh_key.SSHKey( + name="name_value", + public_key="public_key_value", + ) + response = client.create_ssh_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_ssh_key.CreateSSHKeyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcb_ssh_key.SSHKey) + assert response.name == "name_value" + assert response.public_key == "public_key_value" + + +def test_create_ssh_key_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: + client.create_ssh_key() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_ssh_key.CreateSSHKeyRequest() + + +@pytest.mark.asyncio +async def test_create_ssh_key_async( + transport: str = "grpc_asyncio", request_type=gcb_ssh_key.CreateSSHKeyRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_ssh_key.SSHKey( + name="name_value", + public_key="public_key_value", + ) + ) + response = await client.create_ssh_key(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_ssh_key.CreateSSHKeyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcb_ssh_key.SSHKey) + assert response.name == "name_value" + assert response.public_key == "public_key_value" + + +@pytest.mark.asyncio +async def test_create_ssh_key_async_from_dict(): + await test_create_ssh_key_async(request_type=dict) + + +def test_create_ssh_key_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_volume.ResizeVolumeRequest() + request = gcb_ssh_key.CreateSSHKeyRequest() - request.volume = "volume_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.resize_volume(request) + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: + call.return_value = gcb_ssh_key.SSHKey() + client.create_ssh_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3641,28 +3917,26 @@ def test_resize_volume_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "volume=volume_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_resize_volume_field_headers_async(): +async def test_create_ssh_key_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_volume.ResizeVolumeRequest() + request = gcb_ssh_key.CreateSSHKeyRequest() - request.volume = "volume_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.resize_volume(request) + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_ssh_key.SSHKey()) + await client.create_ssh_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3673,39 +3947,43 @@ async def test_resize_volume_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "volume=volume_value", + "parent=parent_value", ) in kw["metadata"] -def test_resize_volume_flattened(): +def test_create_ssh_key_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = gcb_ssh_key.SSHKey() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.resize_volume( - volume="volume_value", - size_gib=844, + client.create_ssh_key( + parent="parent_value", + ssh_key=gcb_ssh_key.SSHKey(name="name_value"), + ssh_key_id="ssh_key_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].volume - mock_val = "volume_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val - arg = args[0].size_gib - mock_val = 844 + arg = args[0].ssh_key + mock_val = gcb_ssh_key.SSHKey(name="name_value") + assert arg == mock_val + arg = args[0].ssh_key_id + mock_val = "ssh_key_id_value" assert arg == mock_val -def test_resize_volume_flattened_error(): +def test_create_ssh_key_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3713,48 +3991,51 @@ def test_resize_volume_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.resize_volume( - gcb_volume.ResizeVolumeRequest(), - volume="volume_value", - size_gib=844, + client.create_ssh_key( + gcb_ssh_key.CreateSSHKeyRequest(), + parent="parent_value", + ssh_key=gcb_ssh_key.SSHKey(name="name_value"), + ssh_key_id="ssh_key_id_value", ) @pytest.mark.asyncio -async def test_resize_volume_flattened_async(): +async def test_create_ssh_key_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: + with mock.patch.object(type(client.transport.create_ssh_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = gcb_ssh_key.SSHKey() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_ssh_key.SSHKey()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.resize_volume( - volume="volume_value", - size_gib=844, + response = await client.create_ssh_key( + parent="parent_value", + ssh_key=gcb_ssh_key.SSHKey(name="name_value"), + ssh_key_id="ssh_key_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].volume - mock_val = "volume_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val - arg = args[0].size_gib - mock_val = 844 + arg = args[0].ssh_key + mock_val = gcb_ssh_key.SSHKey(name="name_value") + assert arg == mock_val + arg = args[0].ssh_key_id + mock_val = "ssh_key_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_resize_volume_flattened_error_async(): +async def test_create_ssh_key_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3762,21 +4043,22 @@ async def test_resize_volume_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.resize_volume( - gcb_volume.ResizeVolumeRequest(), - volume="volume_value", - size_gib=844, + await client.create_ssh_key( + gcb_ssh_key.CreateSSHKeyRequest(), + parent="parent_value", + ssh_key=gcb_ssh_key.SSHKey(name="name_value"), + ssh_key_id="ssh_key_id_value", ) @pytest.mark.parametrize( "request_type", [ - network.ListNetworksRequest, + ssh_key.DeleteSSHKeyRequest, dict, ], ) -def test_list_networks(request_type, transport: str = "grpc"): +def test_delete_ssh_key(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3787,26 +4069,21 @@ def test_list_networks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_networks), "__call__") as call: + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.ListNetworksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_networks(request) + call.return_value = None + response = client.delete_ssh_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworksRequest() + assert args[0] == ssh_key.DeleteSSHKeyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNetworksPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response is None -def test_list_networks_empty_call(): +def test_delete_ssh_key_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -3815,16 +4092,16 @@ def test_list_networks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_networks), "__call__") as call: - client.list_networks() + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: + client.delete_ssh_key() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworksRequest() + assert args[0] == ssh_key.DeleteSSHKeyRequest() @pytest.mark.asyncio -async def test_list_networks_async( - transport: str = "grpc_asyncio", request_type=network.ListNetworksRequest +async def test_delete_ssh_key_async( + transport: str = "grpc_asyncio", request_type=ssh_key.DeleteSSHKeyRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3836,47 +4113,40 @@ async def test_list_networks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.ListNetworksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - response = await client.list_networks(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_ssh_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworksRequest() + assert args[0] == ssh_key.DeleteSSHKeyRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNetworksAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response is None @pytest.mark.asyncio -async def test_list_networks_async_from_dict(): - await test_list_networks_async(request_type=dict) +async def test_delete_ssh_key_async_from_dict(): + await test_delete_ssh_key_async(request_type=dict) -def test_list_networks_field_headers(): +def test_delete_ssh_key_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = network.ListNetworksRequest() + request = ssh_key.DeleteSSHKeyRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: - call.return_value = network.ListNetworksResponse() - client.list_networks(request) + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: + call.return_value = None + client.delete_ssh_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3887,28 +4157,26 @@ def test_list_networks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_networks_field_headers_async(): +async def test_delete_ssh_key_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = network.ListNetworksRequest() + request = ssh_key.DeleteSSHKeyRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.ListNetworksResponse() - ) - await client.list_networks(request) + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_ssh_key(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3919,35 +4187,35 @@ async def test_list_networks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_networks_flattened(): +def test_delete_ssh_key_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.ListNetworksResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_networks( - parent="parent_value", + client.delete_ssh_key( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_networks_flattened_error(): +def test_delete_ssh_key_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3955,43 +4223,41 @@ def test_list_networks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_networks( - network.ListNetworksRequest(), - parent="parent_value", + client.delete_ssh_key( + ssh_key.DeleteSSHKeyRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_networks_flattened_async(): +async def test_delete_ssh_key_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: + with mock.patch.object(type(client.transport.delete_ssh_key), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.ListNetworksResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.ListNetworksResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_networks( - parent="parent_value", + response = await client.delete_ssh_key( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_networks_flattened_error_async(): +async def test_delete_ssh_key_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3999,210 +4265,20 @@ async def test_list_networks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_networks( - network.ListNetworksRequest(), - parent="parent_value", - ) - - -def test_list_networks_pager(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token="abc", - ), - network.ListNetworksResponse( - networks=[], - next_page_token="def", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token="ghi", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_networks(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, network.Network) for i in results) - - -def test_list_networks_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_networks), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token="abc", - ), - network.ListNetworksResponse( - networks=[], - next_page_token="def", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token="ghi", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - pages = list(client.list_networks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_networks_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token="abc", - ), - network.ListNetworksResponse( - networks=[], - next_page_token="def", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token="ghi", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_networks( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, network.Network) for i in responses) - - -@pytest.mark.asyncio -async def test_list_networks_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token="abc", - ), - network.ListNetworksResponse( - networks=[], - next_page_token="def", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token="ghi", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, + await client.delete_ssh_key( + ssh_key.DeleteSSHKeyRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_networks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - network.ListNetworkUsageRequest, + volume.ListVolumesRequest, dict, ], ) -def test_list_network_usage(request_type, transport: str = "grpc"): +def test_list_volumes(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,23 +4289,26 @@ def test_list_network_usage(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = network.ListNetworkUsageResponse() - response = client.list_network_usage(request) + call.return_value = volume.ListVolumesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_volumes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworkUsageRequest() + assert args[0] == volume.ListVolumesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, network.ListNetworkUsageResponse) + assert isinstance(response, pagers.ListVolumesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_network_usage_empty_call(): +def test_list_volumes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -4238,18 +4317,16 @@ def test_list_network_usage_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: - client.list_network_usage() + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + client.list_volumes() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworkUsageRequest() + assert args[0] == volume.ListVolumesRequest() @pytest.mark.asyncio -async def test_list_network_usage_async( - transport: str = "grpc_asyncio", request_type=network.ListNetworkUsageRequest +async def test_list_volumes_async( + transport: str = "grpc_asyncio", request_type=volume.ListVolumesRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4261,46 +4338,47 @@ async def test_list_network_usage_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.ListNetworkUsageResponse() + volume.ListVolumesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.list_network_usage(request) + response = await client.list_volumes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworkUsageRequest() + assert args[0] == volume.ListVolumesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, network.ListNetworkUsageResponse) + assert isinstance(response, pagers.ListVolumesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_network_usage_async_from_dict(): - await test_list_network_usage_async(request_type=dict) +async def test_list_volumes_async_from_dict(): + await test_list_volumes_async(request_type=dict) -def test_list_network_usage_field_headers(): +def test_list_volumes_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = network.ListNetworkUsageRequest() + request = volume.ListVolumesRequest() - request.location = "location_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: - call.return_value = network.ListNetworkUsageResponse() - client.list_network_usage(request) + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + call.return_value = volume.ListVolumesResponse() + client.list_volumes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4311,30 +4389,28 @@ def test_list_network_usage_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_network_usage_field_headers_async(): +async def test_list_volumes_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = network.ListNetworkUsageRequest() + request = volume.ListVolumesRequest() - request.location = "location_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.ListNetworkUsageResponse() + volume.ListVolumesResponse() ) - await client.list_network_usage(request) + await client.list_volumes(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4345,37 +4421,35 @@ async def test_list_network_usage_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "location=location_value", + "parent=parent_value", ) in kw["metadata"] -def test_list_network_usage_flattened(): +def test_list_volumes_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.ListNetworkUsageResponse() + call.return_value = volume.ListVolumesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_network_usage( - location="location_value", + client.list_volumes( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_list_network_usage_flattened_error(): +def test_list_volumes_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4383,45 +4457,43 @@ def test_list_network_usage_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_network_usage( - network.ListNetworkUsageRequest(), - location="location_value", + client.list_volumes( + volume.ListVolumesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_list_network_usage_flattened_async(): +async def test_list_volumes_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.ListNetworkUsageResponse() + call.return_value = volume.ListVolumesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.ListNetworkUsageResponse() + volume.ListVolumesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_network_usage( - location="location_value", + response = await client.list_volumes( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_network_usage_flattened_error_async(): +async def test_list_volumes_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4429,181 +4501,401 @@ async def test_list_network_usage_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_network_usage( - network.ListNetworkUsageRequest(), - location="location_value", + await client.list_volumes( + volume.ListVolumesRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - network.GetNetworkRequest, - dict, - ], -) -def test_get_network(request_type, transport: str = "grpc"): +def test_list_volumes_pager(transport_name: str = "grpc"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_network), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = network.Network( - name="name_value", - id="id_value", - type_=network.Network.Type.CLIENT, - ip_address="ip_address_value", - mac_address=["mac_address_value"], - state=network.Network.State.PROVISIONING, - vlan_id="vlan_id_value", - cidr="cidr_value", - services_cidr="services_cidr_value", + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + RuntimeError, ) - response = client.get_network(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == network.GetNetworkRequest() + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_volumes(request={}) - # Establish that the response is the type that we expect. 
- assert isinstance(response, network.Network) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == "ip_address_value" - assert response.mac_address == ["mac_address_value"] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == "vlan_id_value" - assert response.cidr == "cidr_value" - assert response.services_cidr == "services_cidr_value" + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, volume.Volume) for i in results) -def test_get_network_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_volumes_pages(transport_name: str = "grpc"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_network), "__call__") as call: - client.get_network() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == network.GetNetworkRequest() + with mock.patch.object(type(client.transport.list_volumes), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + RuntimeError, + ) + pages = list(client.list_volumes(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_get_network_async( - transport: str = "grpc_asyncio", request_type=network.GetNetworkRequest -): +async def test_list_volumes_async_pager(): client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_network), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - network.Network( - name="name_value", - id="id_value", - type_=network.Network.Type.CLIENT, - ip_address="ip_address_value", - mac_address=["mac_address_value"], - state=network.Network.State.PROVISIONING, - vlan_id="vlan_id_value", - cidr="cidr_value", - services_cidr="services_cidr_value", - ) - ) - response = await client.get_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == network.GetNetworkRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, network.Network) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == "ip_address_value" - assert response.mac_address == ["mac_address_value"] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == "vlan_id_value" - assert response.cidr == "cidr_value" - assert response.services_cidr == "services_cidr_value" + with mock.patch.object( + type(client.transport.list_volumes), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_volumes( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, volume.Volume) for i in responses) @pytest.mark.asyncio -async def test_get_network_async_from_dict(): - await test_get_network_async(request_type=dict) +async def test_list_volumes_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volumes), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_volumes(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_get_network_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + volume.GetVolumeRequest, + dict, + ], +) +def test_get_volume(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = network.GetNetworkRequest() - - request.name = "name_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_network), "__call__") as call: - call.return_value = network.Network() - client.get_network(request) + with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = volume.Volume( + name="name_value", + id="id_value", + storage_type=volume.Volume.StorageType.SSD, + state=volume.Volume.State.CREATING, + requested_size_gib=1917, + originally_requested_size_gib=3094, + current_size_gib=1710, + emergency_size_gib=1898, + max_size_gib=1265, + auto_grown_size_gib=2032, + remaining_space_gib=1974, + snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, + snapshot_enabled=True, + pod="pod_value", + protocol=volume.Volume.Protocol.FIBRE_CHANNEL, + boot_volume=True, + performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, + notes="notes_value", + workload_profile=volume.Volume.WorkloadProfile.GENERIC, + instances=["instances_value"], + attached=True, + ) + response = client.get_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == volume.GetVolumeRequest() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] + # Establish that the response is the type that we expect. 
+ assert isinstance(response, volume.Volume) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.storage_type == volume.Volume.StorageType.SSD + assert response.state == volume.Volume.State.CREATING + assert response.requested_size_gib == 1917 + assert response.originally_requested_size_gib == 3094 + assert response.current_size_gib == 1710 + assert response.emergency_size_gib == 1898 + assert response.max_size_gib == 1265 + assert response.auto_grown_size_gib == 2032 + assert response.remaining_space_gib == 1974 assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response.snapshot_auto_delete_behavior + == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED + ) + assert response.snapshot_enabled is True + assert response.pod == "pod_value" + assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL + assert response.boot_volume is True + assert ( + response.performance_tier + == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED + ) + assert response.notes == "notes_value" + assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC + assert response.instances == ["instances_value"] + assert response.attached is True + + +def test_get_volume_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + client.get_volume() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == volume.GetVolumeRequest() @pytest.mark.asyncio -async def test_get_network_field_headers_async(): +async def test_get_volume_async( + transport: str = "grpc_asyncio", request_type=volume.GetVolumeRequest +): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume.Volume( + name="name_value", + id="id_value", + storage_type=volume.Volume.StorageType.SSD, + state=volume.Volume.State.CREATING, + requested_size_gib=1917, + originally_requested_size_gib=3094, + current_size_gib=1710, + emergency_size_gib=1898, + max_size_gib=1265, + auto_grown_size_gib=2032, + remaining_space_gib=1974, + snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, + snapshot_enabled=True, + pod="pod_value", + protocol=volume.Volume.Protocol.FIBRE_CHANNEL, + boot_volume=True, + performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, + notes="notes_value", + workload_profile=volume.Volume.WorkloadProfile.GENERIC, + instances=["instances_value"], + attached=True, + ) + ) + response = await client.get_volume(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == volume.GetVolumeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, volume.Volume) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.storage_type == volume.Volume.StorageType.SSD + assert response.state == volume.Volume.State.CREATING + assert response.requested_size_gib == 1917 + assert response.originally_requested_size_gib == 3094 + assert response.current_size_gib == 1710 + assert response.emergency_size_gib == 1898 + assert response.max_size_gib == 1265 + assert response.auto_grown_size_gib == 2032 + assert response.remaining_space_gib == 1974 + assert ( + response.snapshot_auto_delete_behavior + == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED + ) + assert response.snapshot_enabled is True + assert response.pod == "pod_value" + assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL + assert response.boot_volume is True + assert ( + response.performance_tier + == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED + ) + assert response.notes == "notes_value" + assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC + assert response.instances == ["instances_value"] + assert response.attached is True + + +@pytest.mark.asyncio +async def test_get_volume_async_from_dict(): + await test_get_volume_async(request_type=dict) + + +def test_get_volume_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = network.GetNetworkRequest() + request = volume.GetVolumeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_network), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) - await client.get_network(request) + with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + call.return_value = volume.Volume() + client.get_volume(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -4615,18 +4907,48 @@ async def test_get_network_field_headers_async(): ) in kw["metadata"] -def test_get_network_flattened(): - client = BareMetalSolutionClient( +@pytest.mark.asyncio +async def test_get_volume_field_headers_async(): + client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_network), "__call__") as call: + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume.GetVolumeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_volume), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume()) + await client.get_volume(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_volume_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.Network() + call.return_value = volume.Volume() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_network( + client.get_volume( name="name_value", ) @@ -4639,7 +4961,7 @@ def test_get_network_flattened(): assert arg == mock_val -def test_get_network_flattened_error(): +def test_get_volume_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4647,27 +4969,27 @@ def test_get_network_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_network( - network.GetNetworkRequest(), + client.get_volume( + volume.GetVolumeRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_network_flattened_async(): +async def test_get_volume_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_network), "__call__") as call: + with mock.patch.object(type(client.transport.get_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = network.Network() + call.return_value = volume.Volume() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_network( + response = await client.get_volume( name="name_value", ) @@ -4681,7 +5003,7 @@ async def test_get_network_flattened_async(): @pytest.mark.asyncio -async def test_get_network_flattened_error_async(): +async def test_get_volume_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4689,8 +5011,8 @@ async def test_get_network_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_network( - network.GetNetworkRequest(), + await client.get_volume( + volume.GetVolumeRequest(), name="name_value", ) @@ -4698,11 +5020,11 @@ async def test_get_network_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - gcb_network.UpdateNetworkRequest, + gcb_volume.UpdateVolumeRequest, dict, ], ) -def test_update_network(request_type, transport: str = "grpc"): +def test_update_volume(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4713,21 +5035,21 @@ def test_update_network(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_network(request) + response = client.update_volume(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gcb_network.UpdateNetworkRequest() + assert args[0] == gcb_volume.UpdateVolumeRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_network_empty_call(): +def test_update_volume_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -4736,16 +5058,16 @@ def test_update_network_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: - client.update_network() + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: + client.update_volume() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gcb_network.UpdateNetworkRequest() + assert args[0] == gcb_volume.UpdateVolumeRequest() @pytest.mark.asyncio -async def test_update_network_async( - transport: str = "grpc_asyncio", request_type=gcb_network.UpdateNetworkRequest +async def test_update_volume_async( + transport: str = "grpc_asyncio", request_type=gcb_volume.UpdateVolumeRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4757,42 +5079,42 @@ async def test_update_network_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_network(request) + response = await client.update_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gcb_network.UpdateNetworkRequest() + assert args[0] == gcb_volume.UpdateVolumeRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_network_async_from_dict(): - await test_update_network_async(request_type=dict) +async def test_update_volume_async_from_dict(): + await test_update_volume_async(request_type=dict) -def test_update_network_field_headers(): +def test_update_volume_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_network.UpdateNetworkRequest() + request = gcb_volume.UpdateVolumeRequest() - request.network.name = "name_value" + request.volume.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_network(request) + client.update_volume(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4803,28 +5125,28 @@ def test_update_network_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "network.name=name_value", + "volume.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_network_field_headers_async(): +async def test_update_volume_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_network.UpdateNetworkRequest() + request = gcb_volume.UpdateVolumeRequest() - request.network.name = "name_value" + request.volume.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_network(request) + await client.update_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4835,23 +5157,23 @@ async def test_update_network_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "network.name=name_value", + "volume.name=name_value", ) in kw["metadata"] -def test_update_network_flattened(): +def test_update_volume_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_network( - network=gcb_network.Network(name="name_value"), + client.update_volume( + volume=gcb_volume.Volume(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4859,15 +5181,15 @@ def test_update_network_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].network - mock_val = gcb_network.Network(name="name_value") + arg = args[0].volume + mock_val = gcb_volume.Volume(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_network_flattened_error(): +def test_update_volume_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4875,21 +5197,21 @@ def test_update_network_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_network( - gcb_network.UpdateNetworkRequest(), - network=gcb_network.Network(name="name_value"), + client.update_volume( + gcb_volume.UpdateVolumeRequest(), + volume=gcb_volume.Volume(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_network_flattened_async(): +async def test_update_volume_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_network), "__call__") as call: + with mock.patch.object(type(client.transport.update_volume), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -4898,8 +5220,8 @@ async def test_update_network_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_network( - network=gcb_network.Network(name="name_value"), + response = await client.update_volume( + volume=gcb_volume.Volume(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4907,8 +5229,8 @@ async def test_update_network_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].network - mock_val = gcb_network.Network(name="name_value") + arg = args[0].volume + mock_val = gcb_volume.Volume(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -4916,7 +5238,7 @@ async def test_update_network_flattened_async(): @pytest.mark.asyncio -async def test_update_network_flattened_error_async(): +async def test_update_volume_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4924,9 +5246,9 @@ async def test_update_network_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_network( - gcb_network.UpdateNetworkRequest(), - network=gcb_network.Network(name="name_value"), + await client.update_volume( + gcb_volume.UpdateVolumeRequest(), + volume=gcb_volume.Volume(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4934,11 +5256,11 @@ async def test_update_network_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - lun.GetLunRequest, + volume.RenameVolumeRequest, dict, ], ) -def test_get_lun(request_type, transport: str = "grpc"): +def test_rename_volume(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4949,42 +5271,70 @@ def test_get_lun(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = lun.Lun( + call.return_value = volume.Volume( name="name_value", id="id_value", - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume="storage_volume_value", - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid="wwid_value", + storage_type=volume.Volume.StorageType.SSD, + state=volume.Volume.State.CREATING, + requested_size_gib=1917, + originally_requested_size_gib=3094, + current_size_gib=1710, + emergency_size_gib=1898, + max_size_gib=1265, + auto_grown_size_gib=2032, + remaining_space_gib=1974, + snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, + snapshot_enabled=True, + pod="pod_value", + protocol=volume.Volume.Protocol.FIBRE_CHANNEL, + boot_volume=True, + performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, + notes="notes_value", + workload_profile=volume.Volume.WorkloadProfile.GENERIC, + instances=["instances_value"], + attached=True, ) - response = client.get_lun(request) + response = client.rename_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == lun.GetLunRequest() + assert args[0] == volume.RenameVolumeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, lun.Lun) + assert isinstance(response, volume.Volume) assert response.name == "name_value" assert response.id == "id_value" - assert response.state == lun.Lun.State.CREATING - assert response.size_gb == 739 - assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX - assert response.storage_volume == "storage_volume_value" - assert response.shareable is True - assert response.boot_lun is True - assert response.storage_type == lun.Lun.StorageType.SSD - assert response.wwid == "wwid_value" + assert response.storage_type == volume.Volume.StorageType.SSD + assert response.state == volume.Volume.State.CREATING + assert response.requested_size_gib == 1917 + assert response.originally_requested_size_gib == 3094 + assert response.current_size_gib == 1710 + assert response.emergency_size_gib == 1898 + assert response.max_size_gib == 1265 + assert response.auto_grown_size_gib == 2032 + assert response.remaining_space_gib == 1974 + assert ( + response.snapshot_auto_delete_behavior + == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED + ) + assert response.snapshot_enabled is True + assert response.pod == "pod_value" + assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL + assert response.boot_volume is True + assert ( + response.performance_tier + == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED + ) + assert response.notes == "notes_value" + assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC + assert response.instances == ["instances_value"] + assert response.attached is True -def test_get_lun_empty_call(): +def test_rename_volume_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -4993,16 +5343,16 @@ def test_get_lun_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_lun), "__call__") as call: - client.get_lun() + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: + client.rename_volume() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == lun.GetLunRequest() + assert args[0] == volume.RenameVolumeRequest() @pytest.mark.asyncio -async def test_get_lun_async( - transport: str = "grpc_asyncio", request_type=lun.GetLunRequest +async def test_rename_volume_async( + transport: str = "grpc_asyncio", request_type=volume.RenameVolumeRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5014,63 +5364,91 @@ async def test_get_lun_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - lun.Lun( + volume.Volume( name="name_value", id="id_value", - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume="storage_volume_value", - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid="wwid_value", + storage_type=volume.Volume.StorageType.SSD, + state=volume.Volume.State.CREATING, + requested_size_gib=1917, + originally_requested_size_gib=3094, + current_size_gib=1710, + emergency_size_gib=1898, + max_size_gib=1265, + auto_grown_size_gib=2032, + remaining_space_gib=1974, + snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, + snapshot_enabled=True, + pod="pod_value", + protocol=volume.Volume.Protocol.FIBRE_CHANNEL, + boot_volume=True, + performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, + notes="notes_value", + workload_profile=volume.Volume.WorkloadProfile.GENERIC, + instances=["instances_value"], + attached=True, ) ) - response = await client.get_lun(request) + response = await client.rename_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == lun.GetLunRequest() + assert args[0] == volume.RenameVolumeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, lun.Lun) + assert isinstance(response, volume.Volume) assert response.name == "name_value" assert response.id == "id_value" - assert response.state == lun.Lun.State.CREATING - assert response.size_gb == 739 - assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX - assert response.storage_volume == "storage_volume_value" - assert response.shareable is True - assert response.boot_lun is True - assert response.storage_type == lun.Lun.StorageType.SSD - assert response.wwid == "wwid_value" - - -@pytest.mark.asyncio -async def test_get_lun_async_from_dict(): - await test_get_lun_async(request_type=dict) - - -def test_get_lun_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as + assert response.storage_type == volume.Volume.StorageType.SSD + assert response.state == volume.Volume.State.CREATING + assert response.requested_size_gib == 1917 + assert response.originally_requested_size_gib == 3094 + assert response.current_size_gib == 1710 + assert response.emergency_size_gib == 1898 + assert response.max_size_gib == 1265 + assert response.auto_grown_size_gib == 2032 + assert response.remaining_space_gib == 1974 + assert ( + response.snapshot_auto_delete_behavior + == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED + ) + assert response.snapshot_enabled is True + assert response.pod == "pod_value" + assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL + assert response.boot_volume is True + assert ( + response.performance_tier + == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED + ) + assert response.notes == "notes_value" + assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC + assert response.instances == ["instances_value"] + assert response.attached is True + + +@pytest.mark.asyncio +async def test_rename_volume_async_from_dict(): + await 
test_rename_volume_async(request_type=dict) + + +def test_rename_volume_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = lun.GetLunRequest() + request = volume.RenameVolumeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_lun), "__call__") as call: - call.return_value = lun.Lun() - client.get_lun(request) + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: + call.return_value = volume.Volume() + client.rename_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5086,21 +5464,21 @@ def test_get_lun_field_headers(): @pytest.mark.asyncio -async def test_get_lun_field_headers_async(): +async def test_rename_volume_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = lun.GetLunRequest() + request = volume.RenameVolumeRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_lun), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun()) - await client.get_lun(request) + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume()) + await client.rename_volume(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5115,19 +5493,20 @@ async def test_get_lun_field_headers_async(): ) in kw["metadata"] -def test_get_lun_flattened(): +def test_rename_volume_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = lun.Lun() + call.return_value = volume.Volume() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_lun( + client.rename_volume( name="name_value", + new_volume_id="new_volume_id_value", ) # Establish that the underlying call was made with the expected @@ -5137,9 +5516,12 @@ def test_get_lun_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].new_volume_id + mock_val = "new_volume_id_value" + assert arg == mock_val -def test_get_lun_flattened_error(): +def test_rename_volume_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5147,28 +5529,30 @@ def test_get_lun_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_lun( - lun.GetLunRequest(), + client.rename_volume( + volume.RenameVolumeRequest(), name="name_value", + new_volume_id="new_volume_id_value", ) @pytest.mark.asyncio -async def test_get_lun_flattened_async(): +async def test_rename_volume_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + with mock.patch.object(type(client.transport.rename_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = lun.Lun() + call.return_value = volume.Volume() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_lun( + response = await client.rename_volume( name="name_value", + new_volume_id="new_volume_id_value", ) # Establish that the underlying call was made with the expected @@ -5178,10 +5562,13 @@ async def test_get_lun_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].new_volume_id + mock_val = "new_volume_id_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_get_lun_flattened_error_async(): +async def test_rename_volume_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5189,20 +5576,21 @@ async def test_get_lun_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_lun( - lun.GetLunRequest(), + await client.rename_volume( + volume.RenameVolumeRequest(), name="name_value", + new_volume_id="new_volume_id_value", ) @pytest.mark.parametrize( "request_type", [ - lun.ListLunsRequest, + volume.EvictVolumeRequest, dict, ], ) -def test_list_luns(request_type, transport: str = "grpc"): +def test_evict_volume(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5213,26 +5601,21 @@ def test_list_luns(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = lun.ListLunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_luns(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.evict_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == lun.ListLunsRequest() + assert args[0] == volume.EvictVolumeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLunsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_luns_empty_call(): +def test_evict_volume_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BareMetalSolutionClient( @@ -5241,16 +5624,16 @@ def test_list_luns_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: - client.list_luns() + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: + client.evict_volume() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == lun.ListLunsRequest() + assert args[0] == volume.EvictVolumeRequest() @pytest.mark.asyncio -async def test_list_luns_async( - transport: str = "grpc_asyncio", request_type=lun.ListLunsRequest +async def test_evict_volume_async( + transport: str = "grpc_asyncio", request_type=volume.EvictVolumeRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,47 +5645,42 @@ async def test_list_luns_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - lun.ListLunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_luns(request) + response = await client.evict_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == lun.ListLunsRequest() + assert args[0] == volume.EvictVolumeRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLunsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_luns_async_from_dict(): - await test_list_luns_async(request_type=dict) +async def test_evict_volume_async_from_dict(): + await test_evict_volume_async(request_type=dict) -def test_list_luns_field_headers(): +def test_evict_volume_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = lun.ListLunsRequest() + request = volume.EvictVolumeRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: - call.return_value = lun.ListLunsResponse() - client.list_luns(request) + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.evict_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5313,28 +5691,28 @@ def test_list_luns_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_luns_field_headers_async(): +async def test_evict_volume_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = lun.ListLunsRequest() + request = volume.EvictVolumeRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - lun.ListLunsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_luns(request) + await client.evict_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5345,35 +5723,35 @@ async def test_list_luns_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_luns_flattened(): +def test_evict_volume_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = lun.ListLunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_luns( - parent="parent_value", + client.evict_volume( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_luns_flattened_error(): +def test_evict_volume_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5381,43 +5759,43 @@ def test_list_luns_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_luns( - lun.ListLunsRequest(), - parent="parent_value", + client.evict_volume( + volume.EvictVolumeRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_luns_flattened_async(): +async def test_evict_volume_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + with mock.patch.object(type(client.transport.evict_volume), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = lun.ListLunsResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - lun.ListLunsResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_luns( - parent="parent_value", + response = await client.evict_volume( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_luns_flattened_error_async(): +async def test_evict_volume_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5425,262 +5803,63 @@ async def test_list_luns_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_luns( - lun.ListLunsRequest(), - parent="parent_value", + await client.evict_volume( + volume.EvictVolumeRequest(), + name="name_value", ) -def test_list_luns_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + gcb_volume.ResizeVolumeRequest, + dict, + ], +) +def test_resize_volume(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token="abc", - ), - lun.ListLunsResponse( - luns=[], - next_page_token="def", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token="ghi", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_luns(request={}) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.resize_volume(request) - assert pager._metadata == metadata + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume.ResizeVolumeRequest() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, lun.Lun) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_luns_pages(transport_name: str = "grpc"): +def test_resize_volume_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_luns), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token="abc", - ), - lun.ListLunsResponse( - luns=[], - next_page_token="def", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token="ghi", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - pages = list(client.list_luns(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: + client.resize_volume() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume.ResizeVolumeRequest() @pytest.mark.asyncio -async def test_list_luns_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token="abc", - ), - lun.ListLunsResponse( - luns=[], - next_page_token="def", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token="ghi", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_luns( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, lun.Lun) for i in responses) - - -@pytest.mark.asyncio -async def test_list_luns_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token="abc", - ), - lun.ListLunsResponse( - luns=[], - next_page_token="def", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token="ghi", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_luns(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - nfs_share.GetNfsShareRequest, - dict, - ], -) -def test_get_nfs_share(request_type, transport: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = nfs_share.NfsShare( - name="name_value", - nfs_share_id="nfs_share_id_value", - state=nfs_share.NfsShare.State.PROVISIONED, - volume="volume_value", - ) - response = client.get_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.GetNfsShareRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, nfs_share.NfsShare) - assert response.name == "name_value" - assert response.nfs_share_id == "nfs_share_id_value" - assert response.state == nfs_share.NfsShare.State.PROVISIONED - assert response.volume == "volume_value" - - -def test_get_nfs_share_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: - client.get_nfs_share() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.GetNfsShareRequest() - - -@pytest.mark.asyncio -async def test_get_nfs_share_async( - transport: str = "grpc_asyncio", request_type=nfs_share.GetNfsShareRequest +async def test_resize_volume_async( + transport: str = "grpc_asyncio", request_type=gcb_volume.ResizeVolumeRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5692,51 +5871,42 @@ async def test_get_nfs_share_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - nfs_share.NfsShare( - name="name_value", - nfs_share_id="nfs_share_id_value", - state=nfs_share.NfsShare.State.PROVISIONED, - volume="volume_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_nfs_share(request) + response = await client.resize_volume(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.GetNfsShareRequest() + assert args[0] == gcb_volume.ResizeVolumeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, nfs_share.NfsShare) - assert response.name == "name_value" - assert response.nfs_share_id == "nfs_share_id_value" - assert response.state == nfs_share.NfsShare.State.PROVISIONED - assert response.volume == "volume_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_nfs_share_async_from_dict(): - await test_get_nfs_share_async(request_type=dict) +async def test_resize_volume_async_from_dict(): + await test_resize_volume_async(request_type=dict) -def test_get_nfs_share_field_headers(): +def test_resize_volume_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = nfs_share.GetNfsShareRequest() + request = gcb_volume.ResizeVolumeRequest() - request.name = "name_value" + request.volume = "volume_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: - call.return_value = nfs_share.NfsShare() - client.get_nfs_share(request) + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.resize_volume(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5747,26 +5917,28 @@ def test_get_nfs_share_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "volume=volume_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_nfs_share_field_headers_async(): +async def test_resize_volume_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = nfs_share.GetNfsShareRequest() + request = gcb_volume.ResizeVolumeRequest() - request.name = "name_value" + request.volume = "volume_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare()) - await client.get_nfs_share(request) + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.resize_volume(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5777,35 +5949,39 @@ async def test_get_nfs_share_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "volume=volume_value", ) in kw["metadata"] -def test_get_nfs_share_flattened(): +def test_resize_volume_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = nfs_share.NfsShare() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_nfs_share( - name="name_value", + client.resize_volume( + volume="volume_value", + size_gib=844, ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].volume + mock_val = "volume_value" + assert arg == mock_val + arg = args[0].size_gib + mock_val = 844 assert arg == mock_val -def test_get_nfs_share_flattened_error(): +def test_resize_volume_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5813,41 +5989,48 @@ def test_get_nfs_share_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_nfs_share( - nfs_share.GetNfsShareRequest(), - name="name_value", + client.resize_volume( + gcb_volume.ResizeVolumeRequest(), + volume="volume_value", + size_gib=844, ) @pytest.mark.asyncio -async def test_get_nfs_share_flattened_async(): +async def test_resize_volume_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + with mock.patch.object(type(client.transport.resize_volume), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = nfs_share.NfsShare() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_nfs_share( - name="name_value", + response = await client.resize_volume( + volume="volume_value", + size_gib=844, ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].volume + mock_val = "volume_value" + assert arg == mock_val + arg = args[0].size_gib + mock_val = 844 assert arg == mock_val @pytest.mark.asyncio -async def test_get_nfs_share_flattened_error_async(): +async def test_resize_volume_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5855,20 +6038,21 @@ async def test_get_nfs_share_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_nfs_share( - nfs_share.GetNfsShareRequest(), - name="name_value", + await client.resize_volume( + gcb_volume.ResizeVolumeRequest(), + volume="volume_value", + size_gib=844, ) @pytest.mark.parametrize( "request_type", [ - nfs_share.ListNfsSharesRequest, + network.ListNetworksRequest, dict, ], ) -def test_list_nfs_shares(request_type, transport: str = "grpc"): +def test_list_networks(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5879,26 +6063,26 @@ def test_list_nfs_shares(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = nfs_share.ListNfsSharesResponse( + call.return_value = network.ListNetworksResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_nfs_shares(request) + response = client.list_networks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.ListNfsSharesRequest() + assert args[0] == network.ListNetworksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNfsSharesPager) + assert isinstance(response, pagers.ListNetworksPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_nfs_shares_empty_call(): +def test_list_networks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BareMetalSolutionClient( @@ -5907,16 +6091,16 @@ def test_list_nfs_shares_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: - client.list_nfs_shares() + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: + client.list_networks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.ListNfsSharesRequest() + assert args[0] == network.ListNetworksRequest() @pytest.mark.asyncio -async def test_list_nfs_shares_async( - transport: str = "grpc_asyncio", request_type=nfs_share.ListNfsSharesRequest +async def test_list_networks_async( + transport: str = "grpc_asyncio", request_type=network.ListNetworksRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5928,47 +6112,47 @@ async def test_list_nfs_shares_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - nfs_share.ListNfsSharesResponse( + network.ListNetworksResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_nfs_shares(request) + response = await client.list_networks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.ListNfsSharesRequest() + assert args[0] == network.ListNetworksRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNfsSharesAsyncPager) + assert isinstance(response, pagers.ListNetworksAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_nfs_shares_async_from_dict(): - await test_list_nfs_shares_async(request_type=dict) +async def test_list_networks_async_from_dict(): + await test_list_networks_async(request_type=dict) -def test_list_nfs_shares_field_headers(): +def test_list_networks_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = nfs_share.ListNfsSharesRequest() + request = network.ListNetworksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: - call.return_value = nfs_share.ListNfsSharesResponse() - client.list_nfs_shares(request) + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: + call.return_value = network.ListNetworksResponse() + client.list_networks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5984,23 +6168,23 @@ def test_list_nfs_shares_field_headers(): @pytest.mark.asyncio -async def test_list_nfs_shares_field_headers_async(): +async def test_list_networks_field_headers_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = nfs_share.ListNfsSharesRequest() + request = network.ListNetworksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - nfs_share.ListNfsSharesResponse() + network.ListNetworksResponse() ) - await client.list_nfs_shares(request) + await client.list_networks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6015,18 +6199,18 @@ async def test_list_nfs_shares_field_headers_async(): ) in kw["metadata"] -def test_list_nfs_shares_flattened(): +def test_list_networks_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = nfs_share.ListNfsSharesResponse() + call.return_value = network.ListNetworksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_nfs_shares( + client.list_networks( parent="parent_value", ) @@ -6039,7 +6223,7 @@ def test_list_nfs_shares_flattened(): assert arg == mock_val -def test_list_nfs_shares_flattened_error(): +def test_list_networks_flattened_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6047,29 +6231,29 @@ def test_list_nfs_shares_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_nfs_shares( - nfs_share.ListNfsSharesRequest(), + client.list_networks( + network.ListNetworksRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_nfs_shares_flattened_async(): +async def test_list_networks_flattened_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = nfs_share.ListNfsSharesResponse() + call.return_value = network.ListNetworksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - nfs_share.ListNfsSharesResponse() + network.ListNetworksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_nfs_shares( + response = await client.list_networks( parent="parent_value", ) @@ -6083,7 +6267,7 @@ async def test_list_nfs_shares_flattened_async(): @pytest.mark.asyncio -async def test_list_nfs_shares_flattened_error_async(): +async def test_list_networks_flattened_error_async(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6091,44 +6275,44 @@ async def test_list_nfs_shares_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_nfs_shares( - nfs_share.ListNfsSharesRequest(), + await client.list_networks( + network.ListNetworksRequest(), parent="parent_value", ) -def test_list_nfs_shares_pager(transport_name: str = "grpc"): +def test_list_networks_pager(transport_name: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), + network.Network(), ], next_page_token="abc", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[], + network.ListNetworksResponse( + networks=[], next_page_token="def", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), ], next_page_token="ghi", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), ], ), RuntimeError, @@ -6138,95 +6322,95 @@ def test_list_nfs_shares_pager(transport_name: str = "grpc"): metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_nfs_shares(request={}) + pager = client.list_networks(request={}) assert pager._metadata == metadata results = list(pager) assert len(results) == 6 - assert all(isinstance(i, nfs_share.NfsShare) for i in results) + assert all(isinstance(i, network.Network) for i in results) -def 
test_list_nfs_shares_pages(transport_name: str = "grpc"): +def test_list_networks_pages(transport_name: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + with mock.patch.object(type(client.transport.list_networks), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), + network.Network(), ], next_page_token="abc", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[], + network.ListNetworksResponse( + networks=[], next_page_token="def", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), ], next_page_token="ghi", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), ], ), RuntimeError, ) - pages = list(client.list_nfs_shares(request={}).pages) + pages = list(client.list_networks(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_nfs_shares_async_pager(): +async def test_list_networks_async_pager(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_nfs_shares), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_networks), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), + network.Network(), ], next_page_token="abc", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[], + network.ListNetworksResponse( + networks=[], next_page_token="def", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), ], next_page_token="ghi", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), ], ), RuntimeError, ) - async_pager = await client.list_nfs_shares( + async_pager = await client.list_networks( request={}, ) assert async_pager.next_page_token == "abc" @@ -6235,43 +6419,43 @@ async def test_list_nfs_shares_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, nfs_share.NfsShare) for i in responses) + assert all(isinstance(i, network.Network) for i in responses) @pytest.mark.asyncio -async def test_list_nfs_shares_async_pages(): +async def test_list_networks_async_pages(): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_nfs_shares), "__call__", new_callable=mock.AsyncMock + type(client.transport.list_networks), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), + network.Network(), ], next_page_token="abc", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[], + network.ListNetworksResponse( + networks=[], next_page_token="def", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), ], next_page_token="ghi", ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), ], ), RuntimeError, @@ -6280,7 +6464,7 @@ async def test_list_nfs_shares_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_nfs_shares(request={}) + await client.list_networks(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -6290,11 +6474,11 @@ async def test_list_nfs_shares_async_pages(): @pytest.mark.parametrize( "request_type", [ - gcb_nfs_share.UpdateNfsShareRequest, + network.ListNetworkUsageRequest, dict, ], ) -def test_update_nfs_share(request_type, transport: str = "grpc"): +def test_list_network_usage(request_type, transport: str = "grpc"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6305,21 +6489,23 @@ def test_update_nfs_share(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_nfs_share(request) + call.return_value = network.ListNetworkUsageResponse() + response = client.list_network_usage(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gcb_nfs_share.UpdateNfsShareRequest() + assert args[0] == network.ListNetworkUsageRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, network.ListNetworkUsageResponse) -def test_update_nfs_share_empty_call(): +def test_list_network_usage_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BareMetalSolutionClient( @@ -6328,16 +6514,18 @@ def test_update_nfs_share_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: - client.update_nfs_share() + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: + client.list_network_usage() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gcb_nfs_share.UpdateNfsShareRequest() + assert args[0] == network.ListNetworkUsageRequest() @pytest.mark.asyncio -async def test_update_nfs_share_async( - transport: str = "grpc_asyncio", request_type=gcb_nfs_share.UpdateNfsShareRequest +async def test_list_network_usage_async( + transport: str = "grpc_asyncio", request_type=network.ListNetworkUsageRequest ): client = BareMetalSolutionAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6349,228 +6537,14338 @@ async def test_update_nfs_share_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + network.ListNetworkUsageResponse() ) - response = await client.update_nfs_share(request) + response = await client.list_network_usage(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gcb_nfs_share.UpdateNfsShareRequest() + assert args[0] == network.ListNetworkUsageRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, network.ListNetworkUsageResponse) @pytest.mark.asyncio -async def test_update_nfs_share_async_from_dict(): - await test_update_nfs_share_async(request_type=dict) +async def test_list_network_usage_async_from_dict(): + await test_list_network_usage_async(request_type=dict) -def test_update_nfs_share_field_headers(): +def test_list_network_usage_field_headers(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = gcb_nfs_share.UpdateNfsShareRequest() + request = network.ListNetworkUsageRequest() - request.nfs_share.name = "name_value" + request.location = "location_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_nfs_share(request) + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: + call.return_value = network.ListNetworkUsageResponse() + client.list_network_usage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_network_usage_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = network.ListNetworkUsageRequest() + + request.location = "location_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + network.ListNetworkUsageResponse() + ) + await client.list_network_usage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "location=location_value", + ) in kw["metadata"] + + +def test_list_network_usage_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = network.ListNetworkUsageResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_network_usage( + location="location_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + + +def test_list_network_usage_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_network_usage( + network.ListNetworkUsageRequest(), + location="location_value", + ) + + +@pytest.mark.asyncio +async def test_list_network_usage_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_network_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = network.ListNetworkUsageResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + network.ListNetworkUsageResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_network_usage( + location="location_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].location + mock_val = "location_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_network_usage_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_network_usage( + network.ListNetworkUsageRequest(), + location="location_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + network.GetNetworkRequest, + dict, + ], +) +def test_get_network(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = network.Network( + name="name_value", + id="id_value", + type_=network.Network.Type.CLIENT, + ip_address="ip_address_value", + mac_address=["mac_address_value"], + state=network.Network.State.PROVISIONING, + vlan_id="vlan_id_value", + cidr="cidr_value", + services_cidr="services_cidr_value", + pod="pod_value", + jumbo_frames_enabled=True, + gateway_ip="gateway_ip_value", + ) + response = client.get_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == network.GetNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, network.Network) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.type_ == network.Network.Type.CLIENT + assert response.ip_address == "ip_address_value" + assert response.mac_address == ["mac_address_value"] + assert response.state == network.Network.State.PROVISIONING + assert response.vlan_id == "vlan_id_value" + assert response.cidr == "cidr_value" + assert response.services_cidr == "services_cidr_value" + assert response.pod == "pod_value" + assert response.jumbo_frames_enabled is True + assert response.gateway_ip == "gateway_ip_value" + + +def test_get_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_network), "__call__") as call: + client.get_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == network.GetNetworkRequest() + + +@pytest.mark.asyncio +async def test_get_network_async( + transport: str = "grpc_asyncio", request_type=network.GetNetworkRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + network.Network( + name="name_value", + id="id_value", + type_=network.Network.Type.CLIENT, + ip_address="ip_address_value", + mac_address=["mac_address_value"], + state=network.Network.State.PROVISIONING, + vlan_id="vlan_id_value", + cidr="cidr_value", + services_cidr="services_cidr_value", + pod="pod_value", + jumbo_frames_enabled=True, + gateway_ip="gateway_ip_value", + ) + ) + response = await client.get_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == network.GetNetworkRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, network.Network) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.type_ == network.Network.Type.CLIENT + assert response.ip_address == "ip_address_value" + assert response.mac_address == ["mac_address_value"] + assert response.state == network.Network.State.PROVISIONING + assert response.vlan_id == "vlan_id_value" + assert response.cidr == "cidr_value" + assert response.services_cidr == "services_cidr_value" + assert response.pod == "pod_value" + assert response.jumbo_frames_enabled is True + assert response.gateway_ip == "gateway_ip_value" + + +@pytest.mark.asyncio +async def test_get_network_async_from_dict(): + await test_get_network_async(request_type=dict) + + +def test_get_network_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = network.GetNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_network), "__call__") as call: + call.return_value = network.Network() + client.get_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_network_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = network.GetNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_network), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) + await client.get_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_network_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = network.Network() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_network( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_network_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_network( + network.GetNetworkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_network_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = network.Network() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_network( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_network_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_network( + network.GetNetworkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_network.UpdateNetworkRequest, + dict, + ], +) +def test_update_network(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_network.UpdateNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_network), "__call__") as call: + client.update_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_network.UpdateNetworkRequest() + + +@pytest.mark.asyncio +async def test_update_network_async( + transport: str = "grpc_asyncio", request_type=gcb_network.UpdateNetworkRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_network), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_network.UpdateNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_network_async_from_dict(): + await test_update_network_async(request_type=dict) + + +def test_update_network_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_network.UpdateNetworkRequest() + + request.network.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_network), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "network.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_network_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_network.UpdateNetworkRequest() + + request.network.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_network), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "network.name=name_value", + ) in kw["metadata"] + + +def test_update_network_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_network( + network=gcb_network.Network(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].network + mock_val = gcb_network.Network(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_network_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_network( + gcb_network.UpdateNetworkRequest(), + network=gcb_network.Network(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_network_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_network( + network=gcb_network.Network(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].network + mock_val = gcb_network.Network(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_network_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_network( + gcb_network.UpdateNetworkRequest(), + network=gcb_network.Network(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_volume_snapshot.CreateVolumeSnapshotRequest, + dict, + ], +) +def test_create_volume_snapshot(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_volume_snapshot.VolumeSnapshot( + name="name_value", + id="id_value", + description="description_value", + storage_volume="storage_volume_value", + type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, + ) + response = client.create_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume_snapshot.CreateVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcb_volume_snapshot.VolumeSnapshot) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.storage_volume == "storage_volume_value" + assert response.type_ == gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC + + +def test_create_volume_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + client.create_volume_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume_snapshot.CreateVolumeSnapshotRequest() + + +@pytest.mark.asyncio +async def test_create_volume_snapshot_async( + transport: str = "grpc_asyncio", + request_type=gcb_volume_snapshot.CreateVolumeSnapshotRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_volume_snapshot.VolumeSnapshot( + name="name_value", + id="id_value", + description="description_value", + storage_volume="storage_volume_value", + type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, + ) + ) + response = await client.create_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume_snapshot.CreateVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcb_volume_snapshot.VolumeSnapshot) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.storage_volume == "storage_volume_value" + assert response.type_ == gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC + + +@pytest.mark.asyncio +async def test_create_volume_snapshot_async_from_dict(): + await test_create_volume_snapshot_async(request_type=dict) + + +def test_create_volume_snapshot_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + call.return_value = gcb_volume_snapshot.VolumeSnapshot() + client.create_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_volume_snapshot_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_volume_snapshot.VolumeSnapshot() + ) + await client.create_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_volume_snapshot_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_volume_snapshot.VolumeSnapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_volume_snapshot( + parent="parent_value", + volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].volume_snapshot + mock_val = gcb_volume_snapshot.VolumeSnapshot(name="name_value") + assert arg == mock_val + + +def test_create_volume_snapshot_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_volume_snapshot( + gcb_volume_snapshot.CreateVolumeSnapshotRequest(), + parent="parent_value", + volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_volume_snapshot_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_volume_snapshot.VolumeSnapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_volume_snapshot.VolumeSnapshot() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_volume_snapshot( + parent="parent_value", + volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].volume_snapshot + mock_val = gcb_volume_snapshot.VolumeSnapshot(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_volume_snapshot_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_volume_snapshot( + gcb_volume_snapshot.CreateVolumeSnapshotRequest(), + parent="parent_value", + volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_volume_snapshot.RestoreVolumeSnapshotRequest, + dict, + ], +) +def test_restore_volume_snapshot(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume_snapshot.RestoreVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_volume_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + client.restore_volume_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume_snapshot.RestoreVolumeSnapshotRequest() + + +@pytest.mark.asyncio +async def test_restore_volume_snapshot_async( + transport: str = "grpc_asyncio", + request_type=gcb_volume_snapshot.RestoreVolumeSnapshotRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_volume_snapshot.RestoreVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_volume_snapshot_async_from_dict(): + await test_restore_volume_snapshot_async(request_type=dict) + + +def test_restore_volume_snapshot_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() + + request.volume_snapshot = "volume_snapshot_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "volume_snapshot=volume_snapshot_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_volume_snapshot_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() + + request.volume_snapshot = "volume_snapshot_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "volume_snapshot=volume_snapshot_value", + ) in kw["metadata"] + + +def test_restore_volume_snapshot_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_volume_snapshot( + volume_snapshot="volume_snapshot_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].volume_snapshot + mock_val = "volume_snapshot_value" + assert arg == mock_val + + +def test_restore_volume_snapshot_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_volume_snapshot( + gcb_volume_snapshot.RestoreVolumeSnapshotRequest(), + volume_snapshot="volume_snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_restore_volume_snapshot_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_volume_snapshot( + volume_snapshot="volume_snapshot_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].volume_snapshot + mock_val = "volume_snapshot_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restore_volume_snapshot_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_volume_snapshot( + gcb_volume_snapshot.RestoreVolumeSnapshotRequest(), + volume_snapshot="volume_snapshot_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume_snapshot.DeleteVolumeSnapshotRequest, + dict, + ], +) +def test_delete_volume_snapshot(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.DeleteVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_volume_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + client.delete_volume_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.DeleteVolumeSnapshotRequest() + + +@pytest.mark.asyncio +async def test_delete_volume_snapshot_async( + transport: str = "grpc_asyncio", + request_type=volume_snapshot.DeleteVolumeSnapshotRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.DeleteVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_volume_snapshot_async_from_dict(): + await test_delete_volume_snapshot_async(request_type=dict) + + +def test_delete_volume_snapshot_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume_snapshot.DeleteVolumeSnapshotRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + call.return_value = None + client.delete_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_volume_snapshot_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume_snapshot.DeleteVolumeSnapshotRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_volume_snapshot_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_volume_snapshot( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_volume_snapshot_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_volume_snapshot( + volume_snapshot.DeleteVolumeSnapshotRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_volume_snapshot_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_volume_snapshot( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_volume_snapshot_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_volume_snapshot( + volume_snapshot.DeleteVolumeSnapshotRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume_snapshot.GetVolumeSnapshotRequest, + dict, + ], +) +def test_get_volume_snapshot(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = volume_snapshot.VolumeSnapshot( + name="name_value", + id="id_value", + description="description_value", + storage_volume="storage_volume_value", + type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, + ) + response = client.get_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.GetVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, volume_snapshot.VolumeSnapshot) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.storage_volume == "storage_volume_value" + assert response.type_ == volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC + + +def test_get_volume_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + client.get_volume_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.GetVolumeSnapshotRequest() + + +@pytest.mark.asyncio +async def test_get_volume_snapshot_async( + transport: str = "grpc_asyncio", + request_type=volume_snapshot.GetVolumeSnapshotRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume_snapshot.VolumeSnapshot( + name="name_value", + id="id_value", + description="description_value", + storage_volume="storage_volume_value", + type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, + ) + ) + response = await client.get_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.GetVolumeSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, volume_snapshot.VolumeSnapshot) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.storage_volume == "storage_volume_value" + assert response.type_ == volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC + + +@pytest.mark.asyncio +async def test_get_volume_snapshot_async_from_dict(): + await test_get_volume_snapshot_async(request_type=dict) + + +def test_get_volume_snapshot_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume_snapshot.GetVolumeSnapshotRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + call.return_value = volume_snapshot.VolumeSnapshot() + client.get_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_volume_snapshot_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume_snapshot.GetVolumeSnapshotRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume_snapshot.VolumeSnapshot() + ) + await client.get_volume_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_volume_snapshot_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = volume_snapshot.VolumeSnapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_volume_snapshot( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_volume_snapshot_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_volume_snapshot( + volume_snapshot.GetVolumeSnapshotRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_volume_snapshot_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_volume_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = volume_snapshot.VolumeSnapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume_snapshot.VolumeSnapshot() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_volume_snapshot( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_volume_snapshot_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_volume_snapshot( + volume_snapshot.GetVolumeSnapshotRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume_snapshot.ListVolumeSnapshotsRequest, + dict, + ], +) +def test_list_volume_snapshots(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = volume_snapshot.ListVolumeSnapshotsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_volume_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.ListVolumeSnapshotsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVolumeSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_volume_snapshots_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + client.list_volume_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.ListVolumeSnapshotsRequest() + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_async( + transport: str = "grpc_asyncio", + request_type=volume_snapshot.ListVolumeSnapshotsRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume_snapshot.ListVolumeSnapshotsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_volume_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == volume_snapshot.ListVolumeSnapshotsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVolumeSnapshotsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_async_from_dict(): + await test_list_volume_snapshots_async(request_type=dict) + + +def test_list_volume_snapshots_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume_snapshot.ListVolumeSnapshotsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + call.return_value = volume_snapshot.ListVolumeSnapshotsResponse() + client.list_volume_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = volume_snapshot.ListVolumeSnapshotsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume_snapshot.ListVolumeSnapshotsResponse() + ) + await client.list_volume_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_volume_snapshots_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = volume_snapshot.ListVolumeSnapshotsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_volume_snapshots( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_volume_snapshots_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_volume_snapshots( + volume_snapshot.ListVolumeSnapshotsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = volume_snapshot.ListVolumeSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + volume_snapshot.ListVolumeSnapshotsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_volume_snapshots( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_volume_snapshots( + volume_snapshot.ListVolumeSnapshotsRequest(), + parent="parent_value", + ) + + +def test_list_volume_snapshots_pager(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="abc", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[], + next_page_token="def", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="ghi", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_volume_snapshots(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, volume_snapshot.VolumeSnapshot) for i in results) + + +def test_list_volume_snapshots_pages(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="abc", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[], + next_page_token="def", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="ghi", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + ), + RuntimeError, + ) + pages = list(client.list_volume_snapshots(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_async_pager(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="abc", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[], + next_page_token="def", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="ghi", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_volume_snapshots( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, volume_snapshot.VolumeSnapshot) for i in responses) + + +@pytest.mark.asyncio +async def test_list_volume_snapshots_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_volume_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="abc", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[], + next_page_token="def", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="ghi", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_volume_snapshots(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + lun.GetLunRequest, + dict, + ], +) +def test_get_lun(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = lun.Lun( + name="name_value", + id="id_value", + state=lun.Lun.State.CREATING, + size_gb=739, + multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, + storage_volume="storage_volume_value", + shareable=True, + boot_lun=True, + storage_type=lun.Lun.StorageType.SSD, + wwid="wwid_value", + instances=["instances_value"], + ) + response = client.get_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == lun.GetLunRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, lun.Lun) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.state == lun.Lun.State.CREATING + assert response.size_gb == 739 + assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX + assert response.storage_volume == "storage_volume_value" + assert response.shareable is True + assert response.boot_lun is True + assert response.storage_type == lun.Lun.StorageType.SSD + assert response.wwid == "wwid_value" + assert response.instances == ["instances_value"] + + +def test_get_lun_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + client.get_lun() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == lun.GetLunRequest() + + +@pytest.mark.asyncio +async def test_get_lun_async( + transport: str = "grpc_asyncio", request_type=lun.GetLunRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + lun.Lun( + name="name_value", + id="id_value", + state=lun.Lun.State.CREATING, + size_gb=739, + multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, + storage_volume="storage_volume_value", + shareable=True, + boot_lun=True, + storage_type=lun.Lun.StorageType.SSD, + wwid="wwid_value", + instances=["instances_value"], + ) + ) + response = await client.get_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == lun.GetLunRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, lun.Lun) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.state == lun.Lun.State.CREATING + assert response.size_gb == 739 + assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX + assert response.storage_volume == "storage_volume_value" + assert response.shareable is True + assert response.boot_lun is True + assert response.storage_type == lun.Lun.StorageType.SSD + assert response.wwid == "wwid_value" + assert response.instances == ["instances_value"] + + +@pytest.mark.asyncio +async def test_get_lun_async_from_dict(): + await test_get_lun_async(request_type=dict) + + +def test_get_lun_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = lun.GetLunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + call.return_value = lun.Lun() + client.get_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_lun_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = lun.GetLunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun()) + await client.get_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_lun_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = lun.Lun() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_lun( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_lun_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_lun( + lun.GetLunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_lun_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_lun), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = lun.Lun() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_lun( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_lun_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_lun( + lun.GetLunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + lun.ListLunsRequest, + dict, + ], +) +def test_list_luns(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = lun.ListLunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_luns(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == lun.ListLunsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListLunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_luns_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + client.list_luns() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == lun.ListLunsRequest() + + +@pytest.mark.asyncio +async def test_list_luns_async( + transport: str = "grpc_asyncio", request_type=lun.ListLunsRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + lun.ListLunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_luns(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == lun.ListLunsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListLunsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_luns_async_from_dict(): + await test_list_luns_async(request_type=dict) + + +def test_list_luns_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = lun.ListLunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + call.return_value = lun.ListLunsResponse() + client.list_luns(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_luns_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = lun.ListLunsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + lun.ListLunsResponse() + ) + await client.list_luns(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_luns_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = lun.ListLunsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_luns( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_luns_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_luns( + lun.ListLunsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_luns_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = lun.ListLunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + lun.ListLunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_luns( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_luns_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_luns( + lun.ListLunsRequest(), + parent="parent_value", + ) + + +def test_list_luns_pager(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + lun.Lun(), + ], + next_page_token="abc", + ), + lun.ListLunsResponse( + luns=[], + next_page_token="def", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + ], + next_page_token="ghi", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_luns(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, lun.Lun) for i in results) + + +def test_list_luns_pages(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_luns), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + lun.Lun(), + ], + next_page_token="abc", + ), + lun.ListLunsResponse( + luns=[], + next_page_token="def", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + ], + next_page_token="ghi", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + ], + ), + RuntimeError, + ) + pages = list(client.list_luns(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_luns_async_pager(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_luns), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + lun.Lun(), + ], + next_page_token="abc", + ), + lun.ListLunsResponse( + luns=[], + next_page_token="def", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + ], + next_page_token="ghi", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_luns( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, lun.Lun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_luns_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_luns), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + lun.Lun(), + ], + next_page_token="abc", + ), + lun.ListLunsResponse( + luns=[], + next_page_token="def", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + ], + next_page_token="ghi", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_luns(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + lun.EvictLunRequest, + dict, + ], +) +def test_evict_lun(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.evict_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == lun.EvictLunRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_evict_lun_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + client.evict_lun() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == lun.EvictLunRequest() + + +@pytest.mark.asyncio +async def test_evict_lun_async( + transport: str = "grpc_asyncio", request_type=lun.EvictLunRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.evict_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == lun.EvictLunRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_evict_lun_async_from_dict(): + await test_evict_lun_async(request_type=dict) + + +def test_evict_lun_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = lun.EvictLunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.evict_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_evict_lun_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = lun.EvictLunRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.evict_lun(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_evict_lun_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.evict_lun( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_evict_lun_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.evict_lun( + lun.EvictLunRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_evict_lun_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.evict_lun), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.evict_lun( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_evict_lun_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.evict_lun( + lun.EvictLunRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + nfs_share.GetNfsShareRequest, + dict, + ], +) +def test_get_nfs_share(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.NfsShare( + name="name_value", + nfs_share_id="nfs_share_id_value", + id="id_value", + state=nfs_share.NfsShare.State.PROVISIONED, + volume="volume_value", + requested_size_gib=1917, + storage_type=nfs_share.NfsShare.StorageType.SSD, + ) + response = client.get_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.GetNfsShareRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, nfs_share.NfsShare) + assert response.name == "name_value" + assert response.nfs_share_id == "nfs_share_id_value" + assert response.id == "id_value" + assert response.state == nfs_share.NfsShare.State.PROVISIONED + assert response.volume == "volume_value" + assert response.requested_size_gib == 1917 + assert response.storage_type == nfs_share.NfsShare.StorageType.SSD + + +def test_get_nfs_share_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + client.get_nfs_share() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.GetNfsShareRequest() + + +@pytest.mark.asyncio +async def test_get_nfs_share_async( + transport: str = "grpc_asyncio", request_type=nfs_share.GetNfsShareRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + nfs_share.NfsShare( + name="name_value", + nfs_share_id="nfs_share_id_value", + id="id_value", + state=nfs_share.NfsShare.State.PROVISIONED, + volume="volume_value", + requested_size_gib=1917, + storage_type=nfs_share.NfsShare.StorageType.SSD, + ) + ) + response = await client.get_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.GetNfsShareRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, nfs_share.NfsShare) + assert response.name == "name_value" + assert response.nfs_share_id == "nfs_share_id_value" + assert response.id == "id_value" + assert response.state == nfs_share.NfsShare.State.PROVISIONED + assert response.volume == "volume_value" + assert response.requested_size_gib == 1917 + assert response.storage_type == nfs_share.NfsShare.StorageType.SSD + + +@pytest.mark.asyncio +async def test_get_nfs_share_async_from_dict(): + await test_get_nfs_share_async(request_type=dict) + + +def test_get_nfs_share_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.GetNfsShareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + call.return_value = nfs_share.NfsShare() + client.get_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_nfs_share_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.GetNfsShareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare()) + await client.get_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_nfs_share_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.NfsShare() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_nfs_share( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_nfs_share_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_nfs_share( + nfs_share.GetNfsShareRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_nfs_share_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.NfsShare() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_nfs_share( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_nfs_share_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_nfs_share( + nfs_share.GetNfsShareRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + nfs_share.ListNfsSharesRequest, + dict, + ], +) +def test_list_nfs_shares(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.ListNfsSharesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_nfs_shares(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.ListNfsSharesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNfsSharesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_nfs_shares_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + client.list_nfs_shares() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.ListNfsSharesRequest() + + +@pytest.mark.asyncio +async def test_list_nfs_shares_async( + transport: str = "grpc_asyncio", request_type=nfs_share.ListNfsSharesRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + nfs_share.ListNfsSharesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_nfs_shares(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.ListNfsSharesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNfsSharesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_nfs_shares_async_from_dict(): + await test_list_nfs_shares_async(request_type=dict) + + +def test_list_nfs_shares_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = nfs_share.ListNfsSharesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + call.return_value = nfs_share.ListNfsSharesResponse() + client.list_nfs_shares(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_nfs_shares_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.ListNfsSharesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + nfs_share.ListNfsSharesResponse() + ) + await client.list_nfs_shares(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_nfs_shares_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.ListNfsSharesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_nfs_shares( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_nfs_shares_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_nfs_shares( + nfs_share.ListNfsSharesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_nfs_shares_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.ListNfsSharesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + nfs_share.ListNfsSharesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_nfs_shares( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_nfs_shares_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_nfs_shares( + nfs_share.ListNfsSharesRequest(), + parent="parent_value", + ) + + +def test_list_nfs_shares_pager(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + next_page_token="abc", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[], + next_page_token="def", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + ], + next_page_token="ghi", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_nfs_shares(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, nfs_share.NfsShare) for i in results) + + +def test_list_nfs_shares_pages(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake 
the request. + with mock.patch.object(type(client.transport.list_nfs_shares), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + next_page_token="abc", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[], + next_page_token="def", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + ], + next_page_token="ghi", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + ), + RuntimeError, + ) + pages = list(client.list_nfs_shares(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_nfs_shares_async_pager(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_nfs_shares), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + next_page_token="abc", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[], + next_page_token="def", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + ], + next_page_token="ghi", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_nfs_shares( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, nfs_share.NfsShare) for i in responses) + + +@pytest.mark.asyncio +async def test_list_nfs_shares_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_nfs_shares), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + next_page_token="abc", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[], + next_page_token="def", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + ], + next_page_token="ghi", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_nfs_shares(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_nfs_share.UpdateNfsShareRequest, + dict, + ], +) +def test_update_nfs_share(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_nfs_share.UpdateNfsShareRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_nfs_share_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + client.update_nfs_share() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_nfs_share.UpdateNfsShareRequest() + + +@pytest.mark.asyncio +async def test_update_nfs_share_async( + transport: str = "grpc_asyncio", request_type=gcb_nfs_share.UpdateNfsShareRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_nfs_share.UpdateNfsShareRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_nfs_share_async_from_dict(): + await test_update_nfs_share_async(request_type=dict) + + +def test_update_nfs_share_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_nfs_share.UpdateNfsShareRequest() + + request.nfs_share.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "nfs_share.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_nfs_share_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_nfs_share.UpdateNfsShareRequest() + + request.nfs_share.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "nfs_share.name=name_value", + ) in kw["metadata"] + + +def test_update_nfs_share_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_nfs_share( + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].nfs_share + mock_val = gcb_nfs_share.NfsShare(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_nfs_share_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_nfs_share( + gcb_nfs_share.UpdateNfsShareRequest(), + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_nfs_share_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_nfs_share( + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].nfs_share + mock_val = gcb_nfs_share.NfsShare(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_nfs_share_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_nfs_share( + gcb_nfs_share.UpdateNfsShareRequest(), + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_nfs_share.CreateNfsShareRequest, + dict, + ], +) +def test_create_nfs_share(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_nfs_share.CreateNfsShareRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_nfs_share_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + client.create_nfs_share() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_nfs_share.CreateNfsShareRequest() + + +@pytest.mark.asyncio +async def test_create_nfs_share_async( + transport: str = "grpc_asyncio", request_type=gcb_nfs_share.CreateNfsShareRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcb_nfs_share.CreateNfsShareRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_nfs_share_async_from_dict(): + await test_create_nfs_share_async(request_type=dict) + + +def test_create_nfs_share_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_nfs_share.CreateNfsShareRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_nfs_share_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcb_nfs_share.CreateNfsShareRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_nfs_share_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_nfs_share( + parent="parent_value", + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].nfs_share + mock_val = gcb_nfs_share.NfsShare(name="name_value") + assert arg == mock_val + + +def test_create_nfs_share_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_nfs_share( + gcb_nfs_share.CreateNfsShareRequest(), + parent="parent_value", + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_nfs_share_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_nfs_share( + parent="parent_value", + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].nfs_share + mock_val = gcb_nfs_share.NfsShare(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_nfs_share_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_nfs_share( + gcb_nfs_share.CreateNfsShareRequest(), + parent="parent_value", + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + nfs_share.RenameNfsShareRequest, + dict, + ], +) +def test_rename_nfs_share(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.NfsShare( + name="name_value", + nfs_share_id="nfs_share_id_value", + id="id_value", + state=nfs_share.NfsShare.State.PROVISIONED, + volume="volume_value", + requested_size_gib=1917, + storage_type=nfs_share.NfsShare.StorageType.SSD, + ) + response = client.rename_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.RenameNfsShareRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, nfs_share.NfsShare) + assert response.name == "name_value" + assert response.nfs_share_id == "nfs_share_id_value" + assert response.id == "id_value" + assert response.state == nfs_share.NfsShare.State.PROVISIONED + assert response.volume == "volume_value" + assert response.requested_size_gib == 1917 + assert response.storage_type == nfs_share.NfsShare.StorageType.SSD + + +def test_rename_nfs_share_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + client.rename_nfs_share() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.RenameNfsShareRequest() + + +@pytest.mark.asyncio +async def test_rename_nfs_share_async( + transport: str = "grpc_asyncio", request_type=nfs_share.RenameNfsShareRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + nfs_share.NfsShare( + name="name_value", + nfs_share_id="nfs_share_id_value", + id="id_value", + state=nfs_share.NfsShare.State.PROVISIONED, + volume="volume_value", + requested_size_gib=1917, + storage_type=nfs_share.NfsShare.StorageType.SSD, + ) + ) + response = await client.rename_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.RenameNfsShareRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, nfs_share.NfsShare) + assert response.name == "name_value" + assert response.nfs_share_id == "nfs_share_id_value" + assert response.id == "id_value" + assert response.state == nfs_share.NfsShare.State.PROVISIONED + assert response.volume == "volume_value" + assert response.requested_size_gib == 1917 + assert response.storage_type == nfs_share.NfsShare.StorageType.SSD + + +@pytest.mark.asyncio +async def test_rename_nfs_share_async_from_dict(): + await test_rename_nfs_share_async(request_type=dict) + + +def test_rename_nfs_share_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.RenameNfsShareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + call.return_value = nfs_share.NfsShare() + client.rename_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rename_nfs_share_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.RenameNfsShareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare()) + await client.rename_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rename_nfs_share_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.NfsShare() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_nfs_share( + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_nfsshare_id + mock_val = "new_nfsshare_id_value" + assert arg == mock_val + + +def test_rename_nfs_share_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_nfs_share( + nfs_share.RenameNfsShareRequest(), + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", + ) + + +@pytest.mark.asyncio +async def test_rename_nfs_share_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = nfs_share.NfsShare() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_nfs_share( + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_nfsshare_id + mock_val = "new_nfsshare_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rename_nfs_share_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.rename_nfs_share( + nfs_share.RenameNfsShareRequest(), + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + nfs_share.DeleteNfsShareRequest, + dict, + ], +) +def test_delete_nfs_share(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.DeleteNfsShareRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_nfs_share_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + client.delete_nfs_share() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.DeleteNfsShareRequest() + + +@pytest.mark.asyncio +async def test_delete_nfs_share_async( + transport: str = "grpc_asyncio", request_type=nfs_share.DeleteNfsShareRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == nfs_share.DeleteNfsShareRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_nfs_share_async_from_dict(): + await test_delete_nfs_share_async(request_type=dict) + + +def test_delete_nfs_share_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.DeleteNfsShareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_nfs_share_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = nfs_share.DeleteNfsShareRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_nfs_share(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_nfs_share_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_nfs_share( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_nfs_share_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_nfs_share( + nfs_share.DeleteNfsShareRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_nfs_share_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_nfs_share), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_nfs_share( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_nfs_share_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_nfs_share( + nfs_share.DeleteNfsShareRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning.ListProvisioningQuotasRequest, + dict, + ], +) +def test_list_provisioning_quotas(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ListProvisioningQuotasResponse( + next_page_token="next_page_token_value", + ) + response = client.list_provisioning_quotas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.ListProvisioningQuotasRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProvisioningQuotasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_provisioning_quotas_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + client.list_provisioning_quotas() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.ListProvisioningQuotasRequest() + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_async( + transport: str = "grpc_asyncio", + request_type=provisioning.ListProvisioningQuotasRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ListProvisioningQuotasResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_provisioning_quotas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.ListProvisioningQuotasRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProvisioningQuotasAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_async_from_dict(): + await test_list_provisioning_quotas_async(request_type=dict) + + +def test_list_provisioning_quotas_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.ListProvisioningQuotasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + call.return_value = provisioning.ListProvisioningQuotasResponse() + client.list_provisioning_quotas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.ListProvisioningQuotasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ListProvisioningQuotasResponse() + ) + await client.list_provisioning_quotas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_provisioning_quotas_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ListProvisioningQuotasResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_provisioning_quotas( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_provisioning_quotas_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_provisioning_quotas( + provisioning.ListProvisioningQuotasRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ListProvisioningQuotasResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ListProvisioningQuotasResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_provisioning_quotas( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_provisioning_quotas( + provisioning.ListProvisioningQuotasRequest(), + parent="parent_value", + ) + + +def test_list_provisioning_quotas_pager(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + next_page_token="abc", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[], + next_page_token="def", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + ], + next_page_token="ghi", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_provisioning_quotas(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, provisioning.ProvisioningQuota) for i in results) + + +def test_list_provisioning_quotas_pages(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + next_page_token="abc", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[], + next_page_token="def", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + ], + next_page_token="ghi", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + ), + RuntimeError, + ) + pages = list(client.list_provisioning_quotas(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_async_pager(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + next_page_token="abc", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[], + next_page_token="def", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + ], + next_page_token="ghi", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_provisioning_quotas( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, provisioning.ProvisioningQuota) for i in responses) + + +@pytest.mark.asyncio +async def test_list_provisioning_quotas_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_provisioning_quotas), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + next_page_token="abc", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[], + next_page_token="def", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + ], + next_page_token="ghi", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_provisioning_quotas(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning.SubmitProvisioningConfigRequest, + dict, + ], +) +def test_submit_provisioning_config(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.SubmitProvisioningConfigResponse() + response = client.submit_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.SubmitProvisioningConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning.SubmitProvisioningConfigResponse) + + +def test_submit_provisioning_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + client.submit_provisioning_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.SubmitProvisioningConfigRequest() + + +@pytest.mark.asyncio +async def test_submit_provisioning_config_async( + transport: str = "grpc_asyncio", + request_type=provisioning.SubmitProvisioningConfigRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.SubmitProvisioningConfigResponse() + ) + response = await client.submit_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.SubmitProvisioningConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning.SubmitProvisioningConfigResponse) + + +@pytest.mark.asyncio +async def test_submit_provisioning_config_async_from_dict(): + await test_submit_provisioning_config_async(request_type=dict) + + +def test_submit_provisioning_config_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.SubmitProvisioningConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + call.return_value = provisioning.SubmitProvisioningConfigResponse() + client.submit_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_provisioning_config_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.SubmitProvisioningConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.SubmitProvisioningConfigResponse() + ) + await client.submit_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_submit_provisioning_config_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.SubmitProvisioningConfigResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.submit_provisioning_config( + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].provisioning_config + mock_val = provisioning.ProvisioningConfig(name="name_value") + assert arg == mock_val + + +def test_submit_provisioning_config_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.submit_provisioning_config( + provisioning.SubmitProvisioningConfigRequest(), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_submit_provisioning_config_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.SubmitProvisioningConfigResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.SubmitProvisioningConfigResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.submit_provisioning_config( + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].provisioning_config + mock_val = provisioning.ProvisioningConfig(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_submit_provisioning_config_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.submit_provisioning_config( + provisioning.SubmitProvisioningConfigRequest(), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning.GetProvisioningConfigRequest, + dict, + ], +) +def test_get_provisioning_config(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", + ) + response = client.get_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.GetProvisioningConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +def test_get_provisioning_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + client.get_provisioning_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.GetProvisioningConfigRequest() + + +@pytest.mark.asyncio +async def test_get_provisioning_config_async( + transport: str = "grpc_asyncio", + request_type=provisioning.GetProvisioningConfigRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", + ) + ) + response = await client.get_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.GetProvisioningConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +@pytest.mark.asyncio +async def test_get_provisioning_config_async_from_dict(): + await test_get_provisioning_config_async(request_type=dict) + + +def test_get_provisioning_config_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.GetProvisioningConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + call.return_value = provisioning.ProvisioningConfig() + client.get_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_provisioning_config_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.GetProvisioningConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig() + ) + await client.get_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_provisioning_config_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = provisioning.ProvisioningConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_provisioning_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_provisioning_config_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_provisioning_config( + provisioning.GetProvisioningConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_provisioning_config_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_provisioning_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_provisioning_config_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_provisioning_config( + provisioning.GetProvisioningConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning.CreateProvisioningConfigRequest, + dict, + ], +) +def test_create_provisioning_config(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", + ) + response = client.create_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.CreateProvisioningConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +def test_create_provisioning_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + client.create_provisioning_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.CreateProvisioningConfigRequest() + + +@pytest.mark.asyncio +async def test_create_provisioning_config_async( + transport: str = "grpc_asyncio", + request_type=provisioning.CreateProvisioningConfigRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", + ) + ) + response = await client.create_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.CreateProvisioningConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +@pytest.mark.asyncio +async def test_create_provisioning_config_async_from_dict(): + await test_create_provisioning_config_async(request_type=dict) + + +def test_create_provisioning_config_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = provisioning.CreateProvisioningConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + call.return_value = provisioning.ProvisioningConfig() + client.create_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_provisioning_config_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.CreateProvisioningConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig() + ) + await client.create_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_provisioning_config_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_provisioning_config( + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].provisioning_config + mock_val = provisioning.ProvisioningConfig(name="name_value") + assert arg == mock_val + + +def test_create_provisioning_config_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_provisioning_config( + provisioning.CreateProvisioningConfigRequest(), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_provisioning_config_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_provisioning_config( + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].provisioning_config + mock_val = provisioning.ProvisioningConfig(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_provisioning_config_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_provisioning_config( + provisioning.CreateProvisioningConfigRequest(), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + provisioning.UpdateProvisioningConfigRequest, + dict, + ], +) +def test_update_provisioning_config(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", + ) + response = client.update_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.UpdateProvisioningConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +def test_update_provisioning_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + client.update_provisioning_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.UpdateProvisioningConfigRequest() + + +@pytest.mark.asyncio +async def test_update_provisioning_config_async( + transport: str = "grpc_asyncio", + request_type=provisioning.UpdateProvisioningConfigRequest, +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", + ) + ) + response = await client.update_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == provisioning.UpdateProvisioningConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +@pytest.mark.asyncio +async def test_update_provisioning_config_async_from_dict(): + await test_update_provisioning_config_async(request_type=dict) + + +def test_update_provisioning_config_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.UpdateProvisioningConfigRequest() + + request.provisioning_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + call.return_value = provisioning.ProvisioningConfig() + client.update_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "provisioning_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_provisioning_config_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = provisioning.UpdateProvisioningConfigRequest() + + request.provisioning_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig() + ) + await client.update_provisioning_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "provisioning_config.name=name_value", + ) in kw["metadata"] + + +def test_update_provisioning_config_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_provisioning_config( + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].provisioning_config + mock_val = provisioning.ProvisioningConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_provisioning_config_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_provisioning_config( + provisioning.UpdateProvisioningConfigRequest(), + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_provisioning_config_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_provisioning_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = provisioning.ProvisioningConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + provisioning.ProvisioningConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_provisioning_config( + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].provisioning_config + mock_val = provisioning.ProvisioningConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_provisioning_config_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_provisioning_config( + provisioning.UpdateProvisioningConfigRequest(), + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + network.RenameNetworkRequest, + dict, + ], +) +def test_rename_network(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = network.Network( + name="name_value", + id="id_value", + type_=network.Network.Type.CLIENT, + ip_address="ip_address_value", + mac_address=["mac_address_value"], + state=network.Network.State.PROVISIONING, + vlan_id="vlan_id_value", + cidr="cidr_value", + services_cidr="services_cidr_value", + pod="pod_value", + jumbo_frames_enabled=True, + gateway_ip="gateway_ip_value", + ) + response = client.rename_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == network.RenameNetworkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, network.Network) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.type_ == network.Network.Type.CLIENT + assert response.ip_address == "ip_address_value" + assert response.mac_address == ["mac_address_value"] + assert response.state == network.Network.State.PROVISIONING + assert response.vlan_id == "vlan_id_value" + assert response.cidr == "cidr_value" + assert response.services_cidr == "services_cidr_value" + assert response.pod == "pod_value" + assert response.jumbo_frames_enabled is True + assert response.gateway_ip == "gateway_ip_value" + + +def test_rename_network_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + client.rename_network() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == network.RenameNetworkRequest() + + +@pytest.mark.asyncio +async def test_rename_network_async( + transport: str = "grpc_asyncio", request_type=network.RenameNetworkRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + network.Network( + name="name_value", + id="id_value", + type_=network.Network.Type.CLIENT, + ip_address="ip_address_value", + mac_address=["mac_address_value"], + state=network.Network.State.PROVISIONING, + vlan_id="vlan_id_value", + cidr="cidr_value", + services_cidr="services_cidr_value", + pod="pod_value", + jumbo_frames_enabled=True, + gateway_ip="gateway_ip_value", + ) + ) + response = await client.rename_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == network.RenameNetworkRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, network.Network) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.type_ == network.Network.Type.CLIENT + assert response.ip_address == "ip_address_value" + assert response.mac_address == ["mac_address_value"] + assert response.state == network.Network.State.PROVISIONING + assert response.vlan_id == "vlan_id_value" + assert response.cidr == "cidr_value" + assert response.services_cidr == "services_cidr_value" + assert response.pod == "pod_value" + assert response.jumbo_frames_enabled is True + assert response.gateway_ip == "gateway_ip_value" + + +@pytest.mark.asyncio +async def test_rename_network_async_from_dict(): + await test_rename_network_async(request_type=dict) + + +def test_rename_network_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = network.RenameNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + call.return_value = network.Network() + client.rename_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rename_network_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = network.RenameNetworkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) + await client.rename_network(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rename_network_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = network.Network() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_network( + name="name_value", + new_network_id="new_network_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_network_id + mock_val = "new_network_id_value" + assert arg == mock_val + + +def test_rename_network_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rename_network( + network.RenameNetworkRequest(), + name="name_value", + new_network_id="new_network_id_value", + ) + + +@pytest.mark.asyncio +async def test_rename_network_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rename_network), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = network.Network() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_network( + name="name_value", + new_network_id="new_network_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].new_network_id + mock_val = "new_network_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rename_network_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.rename_network( + network.RenameNetworkRequest(), + name="name_value", + new_network_id="new_network_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + osimage.ListOSImagesRequest, + dict, + ], +) +def test_list_os_images(request_type, transport: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = osimage.ListOSImagesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_os_images(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == osimage.ListOSImagesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSImagesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_images_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + client.list_os_images() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == osimage.ListOSImagesRequest() + + +@pytest.mark.asyncio +async def test_list_os_images_async( + transport: str = "grpc_asyncio", request_type=osimage.ListOSImagesRequest +): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + osimage.ListOSImagesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_images(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == osimage.ListOSImagesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSImagesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_images_async_from_dict(): + await test_list_os_images_async(request_type=dict) + + +def test_list_os_images_field_headers(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = osimage.ListOSImagesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + call.return_value = osimage.ListOSImagesResponse() + client.list_os_images(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_images_field_headers_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = osimage.ListOSImagesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + osimage.ListOSImagesResponse() + ) + await client.list_os_images(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_os_images_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = osimage.ListOSImagesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_os_images( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_os_images_flattened_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_images( + osimage.ListOSImagesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_os_images_flattened_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = osimage.ListOSImagesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + osimage.ListOSImagesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_os_images( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_os_images_flattened_error_async(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_os_images( + osimage.ListOSImagesRequest(), + parent="parent_value", + ) + + +def test_list_os_images_pager(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + osimage.OSImage(), + ], + next_page_token="abc", + ), + osimage.ListOSImagesResponse( + os_images=[], + next_page_token="def", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + ], + next_page_token="ghi", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_os_images(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, osimage.OSImage) for i in results) + + +def test_list_os_images_pages(transport_name: str = "grpc"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_os_images), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + osimage.OSImage(), + ], + next_page_token="abc", + ), + osimage.ListOSImagesResponse( + os_images=[], + next_page_token="def", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + ], + next_page_token="ghi", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + ], + ), + RuntimeError, + ) + pages = list(client.list_os_images(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_os_images_async_pager(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_images), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + osimage.OSImage(), + ], + next_page_token="abc", + ), + osimage.ListOSImagesResponse( + os_images=[], + next_page_token="def", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + ], + next_page_token="ghi", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_os_images( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, osimage.OSImage) for i in responses) + + +@pytest.mark.asyncio +async def test_list_os_images_async_pages(): + client = BareMetalSolutionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_images), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + osimage.OSImage(), + ], + next_page_token="abc", + ), + osimage.ListOSImagesResponse( + os_images=[], + next_page_token="def", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + ], + next_page_token="ghi", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_os_images(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + instance.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = instance.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_rest_required_fields( + request_type=instance.ListInstancesRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = instance.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = instance.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.ListInstancesRequest.pb(instance.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = instance.ListInstancesResponse.to_json( + instance.ListInstancesResponse() + ) + + request = instance.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = instance.ListInstancesResponse() + + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=instance.ListInstancesRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = instance.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + instance.ListInstancesRequest(), + parent="parent_value", + ) + + +def test_list_instances_rest_pager(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + instance.ListInstancesResponse( + instances=[ + instance.Instance(), + instance.Instance(), + instance.Instance(), + ], + next_page_token="abc", + ), + instance.ListInstancesResponse( + instances=[], + next_page_token="def", + ), + instance.ListInstancesResponse( + instances=[ + instance.Instance(), + ], + next_page_token="ghi", + ), + instance.ListInstancesResponse( + instances=[ + instance.Instance(), + instance.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(instance.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, instance.Instance) for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + instance.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = instance.Instance( + name="name_value", + id="id_value", + machine_type="machine_type_value", + state=instance.Instance.State.PROVISIONING, + hyperthreading_enabled=True, + interactive_serial_console_enabled=True, + os_image="os_image_value", + pod="pod_value", + network_template="network_template_value", + login_info="login_info_value", + workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, + firmware_version="firmware_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, instance.Instance) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.machine_type == "machine_type_value" + assert response.state == instance.Instance.State.PROVISIONING + assert response.hyperthreading_enabled is True + assert response.interactive_serial_console_enabled is True + assert response.os_image == "os_image_value" + assert response.pod == "pod_value" + assert response.network_template == "network_template_value" + assert response.login_info == "login_info_value" + assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC + assert response.firmware_version == "firmware_version_value" + + +def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + 
+ # Designate an appropriate value for the returned response. + return_value = instance.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = instance.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.GetInstanceRequest.pb(instance.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = instance.Instance.to_json(instance.Instance()) + + request = instance.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = instance.Instance() + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request( + transport: str = "rest", request_type=instance.GetInstanceRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = instance.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) + + +def test_get_instance_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + instance.GetInstanceRequest(), + name="name_value", + ) + + +def test_get_instance_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_instance.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "id": "id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "machine_type": "machine_type_value", + "state": 1, + "hyperthreading_enabled": True, + "labels": {}, + "luns": [ + { + "name": "name_value", + "id": "id_value", + "state": 1, + "size_gb": 739, + "multiprotocol_type": 1, + "storage_volume": "storage_volume_value", + "shareable": True, + "boot_lun": True, + "storage_type": 1, + "wwid": "wwid_value", + "expire_time": {}, + "instances": ["instances_value1", "instances_value2"], + } + ], + "volumes": [ + { + "name": "name_value", + "id": "id_value", + "storage_type": 1, + "state": 1, + "requested_size_gib": 1917, + "originally_requested_size_gib": 3094, + "current_size_gib": 1710, + "emergency_size_gib": 1898, + "max_size_gib": 1265, + "auto_grown_size_gib": 2032, + "remaining_space_gib": 1974, + "snapshot_reservation_detail": { + "reserved_space_gib": 1884, + "reserved_space_used_percent": 2859, + "reserved_space_remaining_gib": 2933, + "reserved_space_percent": 2331, + }, + "snapshot_auto_delete_behavior": 1, + "labels": {}, + "snapshot_enabled": True, + "pod": "pod_value", + "protocol": 1, + "boot_volume": True, + "performance_tier": 1, + "notes": 
"notes_value", + "workload_profile": 1, + "expire_time": {}, + "instances": ["instances_value1", "instances_value2"], + "attached": True, + } + ], + "networks": [ + { + "name": "name_value", + "id": "id_value", + "type_": 1, + "ip_address": "ip_address_value", + "mac_address": ["mac_address_value1", "mac_address_value2"], + "state": 1, + "vlan_id": "vlan_id_value", + "cidr": "cidr_value", + "vrf": { + "name": "name_value", + "state": 1, + "qos_policy": {"bandwidth_gbps": 0.1472}, + "vlan_attachments": [ + { + "peer_vlan_id": 1256, + "peer_ip": "peer_ip_value", + "router_ip": "router_ip_value", + "pairing_key": "pairing_key_value", + "qos_policy": {}, + "id": "id_value", + "interconnect_attachment": "interconnect_attachment_value", + } + ], + }, + "labels": {}, + "services_cidr": "services_cidr_value", + "reservations": [ + { + "start_address": "start_address_value", + "end_address": "end_address_value", + "note": "note_value", + } + ], + "pod": "pod_value", + "mount_points": [ + { + "instance": "instance_value", + "logical_interface": "logical_interface_value", + "default_gateway": True, + "ip_address": "ip_address_value", + } + ], + "jumbo_frames_enabled": True, + "gateway_ip": "gateway_ip_value", + } + ], + "interactive_serial_console_enabled": True, + "os_image": "os_image_value", + "pod": "pod_value", + "network_template": "network_template_value", + "logical_interfaces": [ + { + "logical_network_interfaces": [ + { + "network": "network_value", + "ip_address": "ip_address_value", + "default_gateway": True, + "network_type": 1, + "id": "id_value", + } + ], + "name": "name_value", + "interface_index": 1576, + } + ], + "login_info": "login_info_value", + "workload_profile": 1, + "firmware_version": "firmware_version_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields( + request_type=gcb_instance.UpdateInstanceRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("instance",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as 
req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcb_instance.UpdateInstanceRequest.pb( + gcb_instance.UpdateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcb_instance.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request( + transport: str = "rest", request_type=gcb_instance.UpdateInstanceRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "id": "id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "machine_type": "machine_type_value", + "state": 1, + "hyperthreading_enabled": True, + "labels": {}, + "luns": [ + { + "name": "name_value", + "id": "id_value", + "state": 1, + "size_gb": 739, + "multiprotocol_type": 
1, + "storage_volume": "storage_volume_value", + "shareable": True, + "boot_lun": True, + "storage_type": 1, + "wwid": "wwid_value", + "expire_time": {}, + "instances": ["instances_value1", "instances_value2"], + } + ], + "volumes": [ + { + "name": "name_value", + "id": "id_value", + "storage_type": 1, + "state": 1, + "requested_size_gib": 1917, + "originally_requested_size_gib": 3094, + "current_size_gib": 1710, + "emergency_size_gib": 1898, + "max_size_gib": 1265, + "auto_grown_size_gib": 2032, + "remaining_space_gib": 1974, + "snapshot_reservation_detail": { + "reserved_space_gib": 1884, + "reserved_space_used_percent": 2859, + "reserved_space_remaining_gib": 2933, + "reserved_space_percent": 2331, + }, + "snapshot_auto_delete_behavior": 1, + "labels": {}, + "snapshot_enabled": True, + "pod": "pod_value", + "protocol": 1, + "boot_volume": True, + "performance_tier": 1, + "notes": "notes_value", + "workload_profile": 1, + "expire_time": {}, + "instances": ["instances_value1", "instances_value2"], + "attached": True, + } + ], + "networks": [ + { + "name": "name_value", + "id": "id_value", + "type_": 1, + "ip_address": "ip_address_value", + "mac_address": ["mac_address_value1", "mac_address_value2"], + "state": 1, + "vlan_id": "vlan_id_value", + "cidr": "cidr_value", + "vrf": { + "name": "name_value", + "state": 1, + "qos_policy": {"bandwidth_gbps": 0.1472}, + "vlan_attachments": [ + { + "peer_vlan_id": 1256, + "peer_ip": "peer_ip_value", + "router_ip": "router_ip_value", + "pairing_key": "pairing_key_value", + "qos_policy": {}, + "id": "id_value", + "interconnect_attachment": "interconnect_attachment_value", + } + ], + }, + "labels": {}, + "services_cidr": "services_cidr_value", + "reservations": [ + { + "start_address": "start_address_value", + "end_address": "end_address_value", + "note": "note_value", + } + ], + "pod": "pod_value", + "mount_points": [ + { + "instance": "instance_value", + "logical_interface": "logical_interface_value", + "default_gateway": 
True, + "ip_address": "ip_address_value", + } + ], + "jumbo_frames_enabled": True, + "gateway_ip": "gateway_ip_value", + } + ], + "interactive_serial_console_enabled": True, + "os_image": "os_image_value", + "pod": "pod_value", + "network_template": "network_template_value", + "logical_interfaces": [ + { + "logical_network_interfaces": [ + { + "network": "network_value", + "ip_address": "ip_address_value", + "default_gateway": True, + "network_type": 1, + "id": "id_value", + } + ], + "name": "name_value", + "interface_index": 1576, + } + ], + "login_info": "login_info_value", + "workload_profile": 1, + "firmware_version": "firmware_version_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + instance=gcb_instance.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) + + +def test_update_instance_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + gcb_instance.UpdateInstanceRequest(), + instance=gcb_instance.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_instance_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance.RenameInstanceRequest, + dict, + ], +) +def test_rename_instance_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = instance.Instance( + name="name_value", + id="id_value", + machine_type="machine_type_value", + state=instance.Instance.State.PROVISIONING, + hyperthreading_enabled=True, + interactive_serial_console_enabled=True, + os_image="os_image_value", + pod="pod_value", + network_template="network_template_value", + login_info="login_info_value", + workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, + firmware_version="firmware_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rename_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, instance.Instance) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.machine_type == "machine_type_value" + assert response.state == instance.Instance.State.PROVISIONING + assert response.hyperthreading_enabled is True + assert response.interactive_serial_console_enabled is True + assert response.os_image == "os_image_value" + assert response.pod == "pod_value" + assert response.network_template == "network_template_value" + assert response.login_info == "login_info_value" + assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC + assert response.firmware_version == "firmware_version_value" + + +def test_rename_instance_rest_required_fields( + request_type=instance.RenameInstanceRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request_init["new_instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["newInstanceId"] = "new_instance_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "newInstanceId" in 
jsonified_request + assert jsonified_request["newInstanceId"] == "new_instance_id_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = instance.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = instance.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.rename_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rename_instance_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rename_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "newInstanceId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_rename_instance_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_rename_instance" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_rename_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.RenameInstanceRequest.pb(instance.RenameInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = instance.Instance.to_json(instance.Instance()) + + request = instance.RenameInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = instance.Instance() + + client.rename_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rename_instance_rest_bad_request( + transport: str = "rest", request_type=instance.RenameInstanceRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rename_instance(request) + + +def test_rename_instance_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = instance.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + new_instance_id="new_instance_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rename_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}:rename" + % client.transport._host, + args[1], + ) + + +def test_rename_instance_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_instance( + instance.RenameInstanceRequest(), + name="name_value", + new_instance_id="new_instance_id_value", + ) + + +def test_rename_instance_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance.ResetInstanceRequest, + dict, + ], +) +def test_reset_instance_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reset_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_reset_instance_rest_required_fields( + request_type=instance.ResetInstanceRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reset_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.reset_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reset_instance_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.reset_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reset_instance_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_reset_instance" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_reset_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.ResetInstanceRequest.pb(instance.ResetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = instance.ResetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.reset_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reset_instance_rest_bad_request( + transport: str = "rest", request_type=instance.ResetInstanceRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset_instance(request) + + +def test_reset_instance_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.reset_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}:reset" + % client.transport._host, + args[1], + ) + + +def test_reset_instance_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reset_instance( + instance.ResetInstanceRequest(), + name="name_value", + ) + + +def test_reset_instance_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance.StartInstanceRequest, + dict, + ], +) +def test_start_instance_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_start_instance_rest_required_fields( + request_type=instance.StartInstanceRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.start_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_start_instance_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.start_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_instance_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_start_instance" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_start_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.StartInstanceRequest.pb(instance.StartInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = instance.StartInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.start_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_instance_rest_bad_request( + transport: str = "rest", request_type=instance.StartInstanceRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_instance(request) + + +def test_start_instance_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.start_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}:start" + % client.transport._host, + args[1], + ) + + +def test_start_instance_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.start_instance( + instance.StartInstanceRequest(), + name="name_value", + ) + + +def test_start_instance_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance.StopInstanceRequest, + dict, + ], +) +def test_stop_instance_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.stop_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_stop_instance_rest_required_fields(request_type=instance.StopInstanceRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.stop_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stop_instance_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stop_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_instance_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_stop_instance" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_stop_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.StopInstanceRequest.pb(instance.StopInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = instance.StopInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.stop_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_instance_rest_bad_request( + transport: str = "rest", request_type=instance.StopInstanceRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_instance(request) + + +def test_stop_instance_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.stop_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}:stop" + % client.transport._host, + args[1], + ) + + +def test_stop_instance_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_instance( + instance.StopInstanceRequest(), + name="name_value", + ) + + +def test_stop_instance_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance.EnableInteractiveSerialConsoleRequest, + dict, + ], +) +def test_enable_interactive_serial_console_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_interactive_serial_console(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_enable_interactive_serial_console_rest_required_fields( + request_type=instance.EnableInteractiveSerialConsoleRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_interactive_serial_console._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_interactive_serial_console._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.enable_interactive_serial_console(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_enable_interactive_serial_console_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.enable_interactive_serial_console._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_interactive_serial_console_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_enable_interactive_serial_console", + ) as post, mock.patch.object( + 
transports.BareMetalSolutionRestInterceptor, + "pre_enable_interactive_serial_console", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.EnableInteractiveSerialConsoleRequest.pb( + instance.EnableInteractiveSerialConsoleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = instance.EnableInteractiveSerialConsoleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.enable_interactive_serial_console( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_enable_interactive_serial_console_rest_bad_request( + transport: str = "rest", request_type=instance.EnableInteractiveSerialConsoleRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_interactive_serial_console(request) + + +def test_enable_interactive_serial_console_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enable_interactive_serial_console(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}:enableInteractiveSerialConsole" + % client.transport._host, + args[1], + ) + + +def test_enable_interactive_serial_console_rest_flattened_error( + transport: str = "rest", +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.enable_interactive_serial_console( + instance.EnableInteractiveSerialConsoleRequest(), + name="name_value", + ) + + +def test_enable_interactive_serial_console_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance.DisableInteractiveSerialConsoleRequest, + dict, + ], +) +def test_disable_interactive_serial_console_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_interactive_serial_console(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_disable_interactive_serial_console_rest_required_fields( + request_type=instance.DisableInteractiveSerialConsoleRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_interactive_serial_console._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_interactive_serial_console._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.disable_interactive_serial_console(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_disable_interactive_serial_console_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.disable_interactive_serial_console._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_disable_interactive_serial_console_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_disable_interactive_serial_console", + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "pre_disable_interactive_serial_console", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance.DisableInteractiveSerialConsoleRequest.pb( + instance.DisableInteractiveSerialConsoleRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = instance.DisableInteractiveSerialConsoleRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.disable_interactive_serial_console( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_disable_interactive_serial_console_rest_bad_request( + transport: str = "rest", + request_type=instance.DisableInteractiveSerialConsoleRequest, +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_interactive_serial_console(request) + + +def test_disable_interactive_serial_console_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.disable_interactive_serial_console(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/instances/*}:disableInteractiveSerialConsole" + % client.transport._host, + args[1], + ) + + +def test_disable_interactive_serial_console_rest_flattened_error( + transport: str = "rest", +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.disable_interactive_serial_console( + instance.DisableInteractiveSerialConsoleRequest(), + name="name_value", + ) + + +def test_disable_interactive_serial_console_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_instance.DetachLunRequest, + dict, + ], +) +def test_detach_lun_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detach_lun(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_detach_lun_rest_required_fields(request_type=gcb_instance.DetachLunRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["instance"] = "" + request_init["lun"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_lun._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instance"] = "instance_value" + jsonified_request["lun"] = "lun_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_lun._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instance" in jsonified_request + assert jsonified_request["instance"] == "instance_value" + assert "lun" in jsonified_request + assert jsonified_request["lun"] == "lun_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.detach_lun(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_detach_lun_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.detach_lun._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instance", + "lun", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_lun_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_detach_lun" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_detach_lun" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = gcb_instance.DetachLunRequest.pb(gcb_instance.DetachLunRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcb_instance.DetachLunRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.detach_lun( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_lun_rest_bad_request( + transport: str = "rest", request_type=gcb_instance.DetachLunRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_lun(request) + + +def test_detach_lun_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "instance": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + instance="instance_value", + lun="lun_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.detach_lun(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{instance=projects/*/locations/*/instances/*}:detachLun" + % client.transport._host, + args[1], + ) + + +def test_detach_lun_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.detach_lun( + gcb_instance.DetachLunRequest(), + instance="instance_value", + lun="lun_value", + ) + + +def test_detach_lun_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ssh_key.ListSSHKeysRequest, + dict, + ], +) +def test_list_ssh_keys_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ssh_key.ListSSHKeysResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ssh_key.ListSSHKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_ssh_keys(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSSHKeysPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_ssh_keys_rest_required_fields(request_type=ssh_key.ListSSHKeysRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_ssh_keys._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_ssh_keys._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ssh_key.ListSSHKeysResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ssh_key.ListSSHKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_ssh_keys(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_ssh_keys_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_ssh_keys._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_ssh_keys_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_ssh_keys" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_list_ssh_keys" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = ssh_key.ListSSHKeysRequest.pb(ssh_key.ListSSHKeysRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ssh_key.ListSSHKeysResponse.to_json( + ssh_key.ListSSHKeysResponse() + ) + + request = ssh_key.ListSSHKeysRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ssh_key.ListSSHKeysResponse() + + client.list_ssh_keys( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_ssh_keys_rest_bad_request( + transport: str = "rest", request_type=ssh_key.ListSSHKeysRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_ssh_keys(request) + + +def test_list_ssh_keys_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ssh_key.ListSSHKeysResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ssh_key.ListSSHKeysResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_ssh_keys(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/sshKeys" % client.transport._host, + args[1], + ) + + +def test_list_ssh_keys_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_ssh_keys( + ssh_key.ListSSHKeysRequest(), + parent="parent_value", + ) + + +def test_list_ssh_keys_rest_pager(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + next_page_token="abc", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[], + next_page_token="def", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ], + next_page_token="ghi", + ), + ssh_key.ListSSHKeysResponse( + ssh_keys=[ + ssh_key.SSHKey(), + ssh_key.SSHKey(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(ssh_key.ListSSHKeysResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_ssh_keys(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, ssh_key.SSHKey) for i in results) + + pages = list(client.list_ssh_keys(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_ssh_key.CreateSSHKeyRequest, + dict, + ], +) +def test_create_ssh_key_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["ssh_key"] = {"name": "name_value", "public_key": "public_key_value"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcb_ssh_key.SSHKey( + name="name_value", + public_key="public_key_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcb_ssh_key.SSHKey.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_ssh_key(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcb_ssh_key.SSHKey) + assert response.name == "name_value" + assert response.public_key == "public_key_value" + + +def test_create_ssh_key_rest_required_fields( + request_type=gcb_ssh_key.CreateSSHKeyRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["ssh_key_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "sshKeyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_ssh_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "sshKeyId" in jsonified_request + assert jsonified_request["sshKeyId"] == request_init["ssh_key_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["sshKeyId"] = "ssh_key_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_ssh_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("ssh_key_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sshKeyId" in jsonified_request + assert jsonified_request["sshKeyId"] == "ssh_key_id_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcb_ssh_key.SSHKey() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcb_ssh_key.SSHKey.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_ssh_key(request) + + expected_params = [ + ( + "sshKeyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_ssh_key_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_ssh_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("sshKeyId",)) + & set( + ( + "parent", + "sshKey", + "sshKeyId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_ssh_key_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_create_ssh_key" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_create_ssh_key" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcb_ssh_key.CreateSSHKeyRequest.pb( + gcb_ssh_key.CreateSSHKeyRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcb_ssh_key.SSHKey.to_json(gcb_ssh_key.SSHKey()) + + request = gcb_ssh_key.CreateSSHKeyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcb_ssh_key.SSHKey() + + client.create_ssh_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_ssh_key_rest_bad_request( + transport: str = "rest", request_type=gcb_ssh_key.CreateSSHKeyRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["ssh_key"] = {"name": "name_value", "public_key": "public_key_value"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_ssh_key(request) + + +def test_create_ssh_key_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcb_ssh_key.SSHKey() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ssh_key=gcb_ssh_key.SSHKey(name="name_value"), + ssh_key_id="ssh_key_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcb_ssh_key.SSHKey.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_ssh_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/sshKeys" % client.transport._host, + args[1], + ) + + +def test_create_ssh_key_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_ssh_key( + gcb_ssh_key.CreateSSHKeyRequest(), + parent="parent_value", + ssh_key=gcb_ssh_key.SSHKey(name="name_value"), + ssh_key_id="ssh_key_id_value", + ) + + +def test_create_ssh_key_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + ssh_key.DeleteSSHKeyRequest, + dict, + ], +) +def test_delete_ssh_key_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sshKeys/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_ssh_key(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_ssh_key_rest_required_fields(request_type=ssh_key.DeleteSSHKeyRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_ssh_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_ssh_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_ssh_key(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_ssh_key_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_ssh_key._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_ssh_key_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_delete_ssh_key" + ) as pre: + pre.assert_not_called() + pb_message = ssh_key.DeleteSSHKeyRequest.pb(ssh_key.DeleteSSHKeyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = ssh_key.DeleteSSHKeyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = 
request, metadata + + client.delete_ssh_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_ssh_key_rest_bad_request( + transport: str = "rest", request_type=ssh_key.DeleteSSHKeyRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/sshKeys/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_ssh_key(request) + + +def test_delete_ssh_key_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/sshKeys/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_ssh_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/sshKeys/*}" % client.transport._host, + args[1], + ) + + +def test_delete_ssh_key_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_ssh_key( + ssh_key.DeleteSSHKeyRequest(), + name="name_value", + ) + + +def test_delete_ssh_key_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume.ListVolumesRequest, + dict, + ], +) +def test_list_volumes_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = volume.ListVolumesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_volumes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVolumesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_volumes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_volumes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = volume.ListVolumesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_volumes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_volumes_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_volumes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_volumes_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_volumes" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_list_volumes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.ListVolumesRequest.pb(volume.ListVolumesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = volume.ListVolumesResponse.to_json( + volume.ListVolumesResponse() + ) + + request = volume.ListVolumesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = volume.ListVolumesResponse() + + client.list_volumes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_volumes_rest_bad_request( + transport: str = "rest", request_type=volume.ListVolumesRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_volumes(request) + + +def test_list_volumes_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = volume.ListVolumesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = volume.ListVolumesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_volumes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/volumes" % client.transport._host, + args[1], + ) + + +def test_list_volumes_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_volumes( + volume.ListVolumesRequest(), + parent="parent_value", + ) + + +def test_list_volumes_rest_pager(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + volume.Volume(), + ], + next_page_token="abc", + ), + volume.ListVolumesResponse( + volumes=[], + next_page_token="def", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + ], + next_page_token="ghi", + ), + volume.ListVolumesResponse( + volumes=[ + volume.Volume(), + volume.Volume(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_volumes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, volume.Volume) for i in results) + + pages = list(client.list_volumes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + volume.GetVolumeRequest, + dict, + ], +) +def test_get_volume_rest(request_type): + 
client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = volume.Volume( + name="name_value", + id="id_value", + storage_type=volume.Volume.StorageType.SSD, + state=volume.Volume.State.CREATING, + requested_size_gib=1917, + originally_requested_size_gib=3094, + current_size_gib=1710, + emergency_size_gib=1898, + max_size_gib=1265, + auto_grown_size_gib=2032, + remaining_space_gib=1974, + snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, + snapshot_enabled=True, + pod="pod_value", + protocol=volume.Volume.Protocol.FIBRE_CHANNEL, + boot_volume=True, + performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, + notes="notes_value", + workload_profile=volume.Volume.WorkloadProfile.GENERIC, + instances=["instances_value"], + attached=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_volume(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, volume.Volume) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.storage_type == volume.Volume.StorageType.SSD + assert response.state == volume.Volume.State.CREATING + assert response.requested_size_gib == 1917 + assert response.originally_requested_size_gib == 3094 + assert response.current_size_gib == 1710 + assert response.emergency_size_gib == 1898 + assert response.max_size_gib == 1265 + assert response.auto_grown_size_gib == 2032 + assert response.remaining_space_gib == 1974 + assert ( + response.snapshot_auto_delete_behavior + == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED + ) + assert response.snapshot_enabled is True + assert response.pod == "pod_value" + assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL + assert response.boot_volume is True + assert ( + response.performance_tier + == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED + ) + assert response.notes == "notes_value" + assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC + assert response.instances == ["instances_value"] + assert response.attached is True + + +def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).get_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = volume.Volume() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_volume_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_volume_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_volume" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_get_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.GetVolumeRequest.pb(volume.GetVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code 
= 200 + req.return_value.request = PreparedRequest() + req.return_value._content = volume.Volume.to_json(volume.Volume()) + + request = volume.GetVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = volume.Volume() + + client.get_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_volume_rest_bad_request( + transport: str = "rest", request_type=volume.GetVolumeRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_volume(request) + + +def test_get_volume_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = volume.Volume() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + args[1], + ) + + +def test_get_volume_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_volume( + volume.GetVolumeRequest(), + name="name_value", + ) + + +def test_get_volume_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_volume.UpdateVolumeRequest, + dict, + ], +) +def test_update_volume_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + } + request_init["volume"] = { + "name": "projects/sample1/locations/sample2/volumes/sample3", + "id": "id_value", + "storage_type": 1, + "state": 1, + "requested_size_gib": 1917, + "originally_requested_size_gib": 3094, + "current_size_gib": 1710, + "emergency_size_gib": 1898, + "max_size_gib": 1265, + "auto_grown_size_gib": 2032, + "remaining_space_gib": 1974, + "snapshot_reservation_detail": { + "reserved_space_gib": 1884, + "reserved_space_used_percent": 2859, + "reserved_space_remaining_gib": 2933, + "reserved_space_percent": 2331, + }, + "snapshot_auto_delete_behavior": 1, + "labels": {}, + "snapshot_enabled": True, + "pod": "pod_value", + "protocol": 1, + "boot_volume": True, + "performance_tier": 1, + "notes": "notes_value", + "workload_profile": 1, + "expire_time": {"seconds": 751, "nanos": 543}, + "instances": ["instances_value1", "instances_value2"], + "attached": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_volume(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_volume_rest_required_fields( + request_type=gcb_volume.UpdateVolumeRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_volume._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_volume_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("volume",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_volume_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), 
mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_update_volume" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_update_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcb_volume.UpdateVolumeRequest.pb(gcb_volume.UpdateVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcb_volume.UpdateVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_volume_rest_bad_request( + transport: str = "rest", request_type=gcb_volume.UpdateVolumeRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + } + request_init["volume"] = { + "name": "projects/sample1/locations/sample2/volumes/sample3", + "id": "id_value", + "storage_type": 1, + "state": 1, + "requested_size_gib": 1917, + "originally_requested_size_gib": 3094, + "current_size_gib": 1710, + "emergency_size_gib": 1898, + "max_size_gib": 1265, + "auto_grown_size_gib": 2032, + "remaining_space_gib": 1974, + "snapshot_reservation_detail": { + "reserved_space_gib": 1884, + "reserved_space_used_percent": 2859, + "reserved_space_remaining_gib": 2933, + "reserved_space_percent": 2331, + }, + "snapshot_auto_delete_behavior": 1, + 
"labels": {}, + "snapshot_enabled": True, + "pod": "pod_value", + "protocol": 1, + "boot_volume": True, + "performance_tier": 1, + "notes": "notes_value", + "workload_profile": 1, + "expire_time": {"seconds": 751, "nanos": 543}, + "instances": ["instances_value1", "instances_value2"], + "attached": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_volume(request) + + +def test_update_volume_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + volume=gcb_volume.Volume(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{volume.name=projects/*/locations/*/volumes/*}" + % client.transport._host, + args[1], + ) + + +def test_update_volume_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_volume( + gcb_volume.UpdateVolumeRequest(), + volume=gcb_volume.Volume(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_volume_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume.RenameVolumeRequest, + dict, + ], +) +def test_rename_volume_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = volume.Volume( + name="name_value", + id="id_value", + storage_type=volume.Volume.StorageType.SSD, + state=volume.Volume.State.CREATING, + requested_size_gib=1917, + originally_requested_size_gib=3094, + current_size_gib=1710, + emergency_size_gib=1898, + max_size_gib=1265, + auto_grown_size_gib=2032, + remaining_space_gib=1974, + snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, + snapshot_enabled=True, + pod="pod_value", + protocol=volume.Volume.Protocol.FIBRE_CHANNEL, + boot_volume=True, + performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, + notes="notes_value", + workload_profile=volume.Volume.WorkloadProfile.GENERIC, + instances=["instances_value"], + attached=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rename_volume(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, volume.Volume) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.storage_type == volume.Volume.StorageType.SSD + assert response.state == volume.Volume.State.CREATING + assert response.requested_size_gib == 1917 + assert response.originally_requested_size_gib == 3094 + assert response.current_size_gib == 1710 + assert response.emergency_size_gib == 1898 + assert response.max_size_gib == 1265 + assert response.auto_grown_size_gib == 2032 + assert response.remaining_space_gib == 1974 + assert ( + response.snapshot_auto_delete_behavior + == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED + ) + assert response.snapshot_enabled is True + assert response.pod == "pod_value" + assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL + assert response.boot_volume is True + assert ( + response.performance_tier + == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED + ) + assert response.notes == "notes_value" + assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC + assert response.instances == ["instances_value"] + assert response.attached is True + + +def test_rename_volume_rest_required_fields(request_type=volume.RenameVolumeRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request_init["new_volume_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" 
+ jsonified_request["newVolumeId"] = "new_volume_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rename_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "newVolumeId" in jsonified_request + assert jsonified_request["newVolumeId"] == "new_volume_id_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = volume.Volume() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.rename_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rename_volume_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rename_volume._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "newVolumeId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rename_volume_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_rename_volume" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_rename_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.RenameVolumeRequest.pb(volume.RenameVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = volume.Volume.to_json(volume.Volume()) + + request = volume.RenameVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = volume.Volume() + + client.rename_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rename_volume_rest_bad_request( + transport: str = "rest", request_type=volume.RenameVolumeRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rename_volume(request) + + +def test_rename_volume_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = volume.Volume() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + new_volume_id="new_volume_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = volume.Volume.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rename_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/volumes/*}:rename" + % client.transport._host, + args[1], + ) + + +def test_rename_volume_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rename_volume( + volume.RenameVolumeRequest(), + name="name_value", + new_volume_id="new_volume_id_value", + ) + + +def test_rename_volume_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + volume.EvictVolumeRequest, + dict, + ], +) +def test_evict_volume_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.evict_volume(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_evict_volume_rest_required_fields(request_type=volume.EvictVolumeRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).evict_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).evict_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.evict_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_evict_volume_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.evict_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_evict_volume_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_evict_volume" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_evict_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = volume.EvictVolumeRequest.pb(volume.EvictVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = volume.EvictVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.evict_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_evict_volume_rest_bad_request( + transport: str = "rest", request_type=volume.EvictVolumeRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.evict_volume(request) + + +def test_evict_volume_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.evict_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/volumes/*}:evict" + % client.transport._host, + args[1], + ) + + +def test_evict_volume_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.evict_volume( + volume.EvictVolumeRequest(), + name="name_value", + ) + + +def test_evict_volume_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_volume.ResizeVolumeRequest, + dict, + ], +) +def test_resize_volume_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"volume": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resize_volume(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_resize_volume_rest_required_fields( + request_type=gcb_volume.ResizeVolumeRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["volume"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resize_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["volume"] = "volume_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resize_volume._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "volume" in jsonified_request + assert jsonified_request["volume"] == "volume_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resize_volume(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resize_volume_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.resize_volume._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("volume",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_volume_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_resize_volume" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_resize_volume" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcb_volume.ResizeVolumeRequest.pb(gcb_volume.ResizeVolumeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, +
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcb_volume.ResizeVolumeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.resize_volume( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_volume_rest_bad_request( + transport: str = "rest", request_type=gcb_volume.ResizeVolumeRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"volume": "projects/sample1/locations/sample2/volumes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_volume(request) + + +def test_resize_volume_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "volume": "projects/sample1/locations/sample2/volumes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + volume="volume_value", + size_gib=844, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resize_volume(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{volume=projects/*/locations/*/volumes/*}:resize" + % client.transport._host, + args[1], + ) + + +def test_resize_volume_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.resize_volume( + gcb_volume.ResizeVolumeRequest(), + volume="volume_value", + size_gib=844, + ) + + +def test_resize_volume_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + network.ListNetworksRequest, + dict, + ], +) +def test_list_networks_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network.ListNetworksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = network.ListNetworksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_networks(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNetworksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_networks_rest_required_fields(request_type=network.ListNetworksRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_networks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_networks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = network.ListNetworksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = network.ListNetworksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_networks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_networks_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.list_networks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_networks_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode,
mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_networks" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_list_networks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = network.ListNetworksRequest.pb(network.ListNetworksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = network.ListNetworksResponse.to_json( + network.ListNetworksResponse() + ) + + request = network.ListNetworksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = network.ListNetworksResponse() + + client.list_networks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_networks_rest_bad_request( + transport: str = "rest", request_type=network.ListNetworksRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_networks(request) + + +def test_list_networks_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network.ListNetworksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = network.ListNetworksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_networks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/networks" % client.transport._host, + args[1], + ) + + +def test_list_networks_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_networks( + network.ListNetworksRequest(), + parent="parent_value", + ) + + +def test_list_networks_rest_pager(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), + network.Network(), + ], + next_page_token="abc", + ), + network.ListNetworksResponse( + networks=[], + next_page_token="def", + ), + network.ListNetworksResponse( + networks=[ + network.Network(), + ], + next_page_token="ghi", + ), + network.ListNetworksResponse( + networks=[ + network.Network(), + network.Network(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(network.ListNetworksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_networks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, network.Network) for i in results) + + pages = list(client.list_networks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + network.ListNetworkUsageRequest, + dict, + ], +) 
+def test_list_network_usage_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network.ListNetworkUsageResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = network.ListNetworkUsageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_network_usage(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, network.ListNetworkUsageResponse) + + +def test_list_network_usage_rest_required_fields( + request_type=network.ListNetworkUsageRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["location"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_network_usage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["location"] = "location_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_network_usage._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = network.ListNetworkUsageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = network.ListNetworkUsageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_network_usage(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_network_usage_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.list_network_usage._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("location",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_network_usage_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_network_usage" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_list_network_usage" + ) as pre: + pre.assert_not_called() + post.assert_not_called() +
pb_message = network.ListNetworkUsageRequest.pb( + network.ListNetworkUsageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = network.ListNetworkUsageResponse.to_json( + network.ListNetworkUsageResponse() + ) + + request = network.ListNetworkUsageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = network.ListNetworkUsageResponse() + + client.list_network_usage( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_network_usage_rest_bad_request( + transport: str = "rest", request_type=network.ListNetworkUsageRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_network_usage(request) + + +def test_list_network_usage_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = network.ListNetworkUsageResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + location="location_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = network.ListNetworkUsageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_network_usage(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{location=projects/*/locations/*}/networks:listNetworkUsage" + % client.transport._host, + args[1], + ) + + +def test_list_network_usage_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_network_usage( + network.ListNetworkUsageRequest(), + location="location_value", + ) + + +def test_list_network_usage_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + network.GetNetworkRequest, + dict, + ], +) +def test_get_network_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/networks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network.Network( + name="name_value", + id="id_value", + type_=network.Network.Type.CLIENT, + ip_address="ip_address_value", + mac_address=["mac_address_value"], + state=network.Network.State.PROVISIONING, + vlan_id="vlan_id_value", + cidr="cidr_value", + services_cidr="services_cidr_value", + pod="pod_value", + jumbo_frames_enabled=True, + gateway_ip="gateway_ip_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = network.Network.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_network(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, network.Network) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.type_ == network.Network.Type.CLIENT + assert response.ip_address == "ip_address_value" + assert response.mac_address == ["mac_address_value"] + assert response.state == network.Network.State.PROVISIONING + assert response.vlan_id == "vlan_id_value" + assert response.cidr == "cidr_value" + assert response.services_cidr == "services_cidr_value" + assert response.pod == "pod_value" + assert response.jumbo_frames_enabled is True + assert response.gateway_ip == "gateway_ip_value" + + +def test_get_network_rest_required_fields(request_type=network.GetNetworkRequest): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_network._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_network._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = network.Network() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = network.Network.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_network(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_network_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.get_network._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_network_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template,
"transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_network" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_get_network" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = network.GetNetworkRequest.pb(network.GetNetworkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = network.Network.to_json(network.Network()) + + request = network.GetNetworkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = network.Network() + + client.get_network( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_network_rest_bad_request( + transport: str = "rest", request_type=network.GetNetworkRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/networks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_network(request) + + +def test_get_network_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network.Network() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/networks/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = network.Network.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_network(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/networks/*}" % client.transport._host, + args[1], + ) + + +def test_get_network_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_network( + network.GetNetworkRequest(), + name="name_value", + ) + + +def test_get_network_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_network.UpdateNetworkRequest, + dict, + ], +) +def test_update_network_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "network": {"name": "projects/sample1/locations/sample2/networks/sample3"} + } + request_init["network"] = { + "name": "projects/sample1/locations/sample2/networks/sample3", + "id": "id_value", + "type_": 1, + "ip_address": "ip_address_value", + "mac_address": ["mac_address_value1", "mac_address_value2"], + "state": 1, + "vlan_id": "vlan_id_value", + "cidr": "cidr_value", + "vrf": { + "name": "name_value", + "state": 1, + "qos_policy": {"bandwidth_gbps": 0.1472}, + "vlan_attachments": [ + { + "peer_vlan_id": 1256, + "peer_ip": "peer_ip_value", + "router_ip": "router_ip_value", + "pairing_key": "pairing_key_value", + "qos_policy": {}, + "id": "id_value", + "interconnect_attachment": "interconnect_attachment_value", + } + ], + }, + "labels": {}, + "services_cidr": "services_cidr_value", + "reservations": [ + { + "start_address": "start_address_value", + "end_address": "end_address_value", + "note": "note_value", + } + ], + "pod": "pod_value", + "mount_points": [ + { + "instance": "instance_value", + "logical_interface": "logical_interface_value", + "default_gateway": True, + "ip_address": "ip_address_value", + } + ], + "jumbo_frames_enabled": True, + "gateway_ip": "gateway_ip_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_network(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_network_rest_required_fields( + request_type=gcb_network.UpdateNetworkRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_network._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_network._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_network(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_network_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_network._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("network",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_network_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), + ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_update_network" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_update_network" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcb_network.UpdateNetworkRequest.pb( + gcb_network.UpdateNetworkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = gcb_network.UpdateNetworkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_network( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_network_rest_bad_request( + transport: str = "rest", request_type=gcb_network.UpdateNetworkRequest +): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "network": {"name": "projects/sample1/locations/sample2/networks/sample3"} + } + request_init["network"] = { + "name": "projects/sample1/locations/sample2/networks/sample3", + "id": "id_value", + "type_": 1, + "ip_address": "ip_address_value", + "mac_address": ["mac_address_value1", "mac_address_value2"], + "state": 1, + "vlan_id": "vlan_id_value", + "cidr": "cidr_value", + "vrf": { + "name": "name_value", + "state": 1, + "qos_policy": {"bandwidth_gbps": 0.1472}, + "vlan_attachments": [ + { + 
"peer_vlan_id": 1256, + "peer_ip": "peer_ip_value", + "router_ip": "router_ip_value", + "pairing_key": "pairing_key_value", + "qos_policy": {}, + "id": "id_value", + "interconnect_attachment": "interconnect_attachment_value", + } + ], + }, + "labels": {}, + "services_cidr": "services_cidr_value", + "reservations": [ + { + "start_address": "start_address_value", + "end_address": "end_address_value", + "note": "note_value", + } + ], + "pod": "pod_value", + "mount_points": [ + { + "instance": "instance_value", + "logical_interface": "logical_interface_value", + "default_gateway": True, + "ip_address": "ip_address_value", + } + ], + "jumbo_frames_enabled": True, + "gateway_ip": "gateway_ip_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_network(request) + + +def test_update_network_rest_flattened(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "network": {"name": "projects/sample1/locations/sample2/networks/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + network=gcb_network.Network(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_network(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{network.name=projects/*/locations/*/networks/*}" + % client.transport._host, + args[1], + ) + + +def test_update_network_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_network( + gcb_network.UpdateNetworkRequest(), + network=gcb_network.Network(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_network_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcb_volume_snapshot.CreateVolumeSnapshotRequest, + dict, + ], +) +def test_create_volume_snapshot_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init["volume_snapshot"] = { + "name": "name_value", + "id": "id_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "storage_volume": "storage_volume_value", + "type_": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcb_volume_snapshot.VolumeSnapshot( + name="name_value", + id="id_value", + description="description_value", + storage_volume="storage_volume_value", + type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcb_volume_snapshot.VolumeSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_volume_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcb_volume_snapshot.VolumeSnapshot) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.storage_volume == "storage_volume_value" + assert response.type_ == gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC + + +def test_create_volume_snapshot_rest_required_fields( + request_type=gcb_volume_snapshot.CreateVolumeSnapshotRequest, +): + transport_class = transports.BareMetalSolutionRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_volume_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_volume_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcb_volume_snapshot.VolumeSnapshot() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcb_volume_snapshot.VolumeSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_volume_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "nfs_share.name=name_value", - ) in kw["metadata"] +def test_create_volume_snapshot_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_volume_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "volumeSnapshot", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_nfs_share_field_headers_async(): - client = BareMetalSolutionAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_volume_snapshot_rest_interceptors(null_interceptor): + transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BareMetalSolutionRestInterceptor(), ) + client = BareMetalSolutionClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_create_volume_snapshot" + ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "pre_create_volume_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcb_volume_snapshot.CreateVolumeSnapshotRequest.pb( + gcb_volume_snapshot.CreateVolumeSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = gcb_nfs_share.UpdateNfsShareRequest() + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcb_volume_snapshot.VolumeSnapshot.to_json( + gcb_volume_snapshot.VolumeSnapshot() + ) - request.nfs_share.name = "name_value" + request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcb_volume_snapshot.VolumeSnapshot() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + client.create_volume_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - await client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "nfs_share.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_update_nfs_share_flattened(): +def test_create_volume_snapshot_rest_bad_request( + transport: str = "rest", + request_type=gcb_volume_snapshot.CreateVolumeSnapshotRequest, +): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_nfs_share( - nfs_share=gcb_nfs_share.NfsShare(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init["volume_snapshot"] = { + "name": "name_value", + "id": "id_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "storage_volume": "storage_volume_value", + "type_": 1, + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].nfs_share - mock_val = gcb_nfs_share.NfsShare(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_volume_snapshot(request) -def test_update_nfs_share_flattened_error(): +def test_create_volume_snapshot_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_nfs_share( - gcb_nfs_share.UpdateNfsShareRequest(), - nfs_share=gcb_nfs_share.NfsShare(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcb_volume_snapshot.VolumeSnapshot() + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } -@pytest.mark.asyncio -async def test_update_nfs_share_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name="name_value"), + ) + mock_args.update(sample_request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_nfs_share), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcb_volume_snapshot.VolumeSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_nfs_share( - nfs_share=gcb_nfs_share.NfsShare(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + client.create_volume_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].nfs_share - mock_val = gcb_nfs_share.NfsShare(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*/volumes/*}/snapshots" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_update_nfs_share_flattened_error_async(): - client = BareMetalSolutionAsyncClient( +def test_create_volume_snapshot_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_nfs_share( - gcb_nfs_share.UpdateNfsShareRequest(), - nfs_share=gcb_nfs_share.NfsShare(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_volume_snapshot( + gcb_volume_snapshot.CreateVolumeSnapshotRequest(), + parent="parent_value", + volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name="name_value"), ) +def test_create_volume_snapshot_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ - instance.ListInstancesRequest, + gcb_volume_snapshot.RestoreVolumeSnapshotRequest, dict, ], ) -def test_list_instances_rest(request_type): +def test_restore_volume_snapshot_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "volume_snapshot": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = instance.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = instance.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_instances(request) + response = client.restore_volume_snapshot(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_instances_rest_required_fields( - request_type=instance.ListInstancesRequest, +def test_restore_volume_snapshot_rest_required_fields( + request_type=gcb_volume_snapshot.RestoreVolumeSnapshotRequest, ): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["parent"] = "" + request_init["volume_snapshot"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6585,29 +20883,21 @@ def test_list_instances_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) + ).restore_volume_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["volumeSnapshot"] = "volume_snapshot_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).list_instances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).restore_volume_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "volumeSnapshot" in jsonified_request + assert jsonified_request["volumeSnapshot"] == "volume_snapshot_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6616,7 +20906,7 @@ def test_list_instances_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = instance.ListInstancesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6628,47 +20918,37 @@ def test_list_instances_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - pb_return_value = instance.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_instances(request) + response = client.restore_volume_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_instances_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) +def test_restore_volume_snapshot_rest_unset_required_fields(): + transport = transports.BareMetalSolutionRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.restore_volume_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("volumeSnapshot",))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): +def test_restore_volume_snapshot_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None @@ -6681,13 +20961,17 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_list_instances" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_restore_volume_snapshot" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_list_instances" + transports.BareMetalSolutionRestInterceptor, "pre_restore_volume_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = instance.ListInstancesRequest.pb(instance.ListInstancesRequest()) + pb_message = gcb_volume_snapshot.RestoreVolumeSnapshotRequest.pb( + gcb_volume_snapshot.RestoreVolumeSnapshotRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6698,19 +20982,19 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = instance.ListInstancesResponse.to_json( - instance.ListInstancesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = instance.ListInstancesRequest() + request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = instance.ListInstancesResponse() + post.return_value = operations_pb2.Operation() - client.list_instances( + client.restore_volume_snapshot( request, metadata=[ ("key", "val"), @@ -6722,8 +21006,9 @@ def test_list_instances_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_instances_rest_bad_request( - transport: str = "rest", request_type=instance.ListInstancesRequest +def test_restore_volume_snapshot_rest_bad_request( + transport: str 
= "rest", + request_type=gcb_volume_snapshot.RestoreVolumeSnapshotRequest, ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6731,7 +21016,9 @@ def test_list_instances_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "volume_snapshot": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6743,10 +21030,10 @@ def test_list_instances_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_instances(request) + client.restore_volume_snapshot(request) -def test_list_instances_rest_flattened(): +def test_restore_volume_snapshot_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6755,38 +21042,40 @@ def test_list_instances_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = instance.ListInstancesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "volume_snapshot": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + volume_snapshot="volume_snapshot_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = instance.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_instances(**mock_args) + client.restore_volume_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{parent=projects/*/locations/*}/instances" % client.transport._host, + "%s/v2/{volume_snapshot=projects/*/locations/*/volumes/*/snapshots/*}:restoreVolumeSnapshot" + % client.transport._host, args[1], ) -def test_list_instances_rest_flattened_error(transport: str = "rest"): +def test_restore_volume_snapshot_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6795,129 +21084,58 @@ def test_list_instances_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_instances( - instance.ListInstancesRequest(), - parent="parent_value", + client.restore_volume_snapshot( + gcb_volume_snapshot.RestoreVolumeSnapshotRequest(), + volume_snapshot="volume_snapshot_value", ) -def test_list_instances_rest_pager(transport: str = "rest"): +def test_restore_volume_snapshot_rest_error(): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - instance.Instance(), - ], - next_page_token="abc", - ), - instance.ListInstancesResponse( - instances=[], - next_page_token="def", - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - ], - next_page_token="ghi", - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(instance.ListInstancesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_instances(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, instance.Instance) for i in results) - - pages = 
list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - instance.GetInstanceRequest, + volume_snapshot.DeleteVolumeSnapshotRequest, dict, ], ) -def test_get_instance_rest(request_type): +def test_delete_volume_snapshot_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = instance.Instance( - name="name_value", - id="id_value", - machine_type="machine_type_value", - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image="os_image_value", - pod="pod_value", - network_template="network_template_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_instance(request) + response = client.delete_volume_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, instance.Instance) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.machine_type == "machine_type_value" - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == "os_image_value" - assert response.pod == "pod_value" - assert response.network_template == "network_template_value" + assert response is None -def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequest): +def test_delete_volume_snapshot_rest_required_fields( + request_type=volume_snapshot.DeleteVolumeSnapshotRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} @@ -6936,7 +21154,7 @@ def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) + ).delete_volume_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6945,7 +21163,7 @@ def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_instance._get_unset_required_fields(jsonified_request) + ).delete_volume_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6959,7 +21177,7 @@ def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = instance.Instance() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6971,38 +21189,36 @@ def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - pb_return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_instance(request) + response = client.delete_volume_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_instance_rest_unset_required_fields(): +def test_delete_volume_snapshot_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_instance._get_unset_required_fields({}) + unset_fields = transport.delete_volume_snapshot._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): +def test_delete_volume_snapshot_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7015,13 +21231,12 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_get_instance" - ) as post, mock.patch.object( - 
transports.BareMetalSolutionRestInterceptor, "pre_get_instance" + transports.BareMetalSolutionRestInterceptor, "pre_delete_volume_snapshot" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = instance.GetInstanceRequest.pb(instance.GetInstanceRequest()) + pb_message = volume_snapshot.DeleteVolumeSnapshotRequest.pb( + volume_snapshot.DeleteVolumeSnapshotRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7032,17 +21247,15 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = instance.Instance.to_json(instance.Instance()) - request = instance.GetInstanceRequest() + request = volume_snapshot.DeleteVolumeSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = instance.Instance() - client.get_instance( + client.delete_volume_snapshot( request, metadata=[ ("key", "val"), @@ -7051,11 +21264,10 @@ def test_get_instance_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_instance_rest_bad_request( - transport: str = "rest", request_type=instance.GetInstanceRequest +def test_delete_volume_snapshot_rest_bad_request( + transport: str = "rest", request_type=volume_snapshot.DeleteVolumeSnapshotRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7063,7 +21275,9 @@ def test_get_instance_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7075,10 +21289,10 @@ def test_get_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_instance(request) + client.delete_volume_snapshot(request) -def test_get_instance_rest_flattened(): +def test_delete_volume_snapshot_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7087,11 +21301,11 @@ def test_get_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = instance.Instance() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" } # get truthy value for each flattened field @@ -7103,24 +21317,24 @@ def test_get_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_instance(**mock_args) + client.delete_volume_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/instances/*}" % client.transport._host, + "%s/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}" + % client.transport._host, args[1], ) -def test_get_instance_rest_flattened_error(transport: str = "rest"): +def test_delete_volume_snapshot_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7129,13 +21343,13 @@ def test_get_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_instance( - instance.GetInstanceRequest(), + client.delete_volume_snapshot( + volume_snapshot.DeleteVolumeSnapshotRequest(), name="name_value", ) -def test_get_instance_rest_error(): +def test_delete_volume_snapshot_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7144,122 +21358,59 @@ def test_get_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcb_instance.UpdateInstanceRequest, + volume_snapshot.GetVolumeSnapshotRequest, dict, ], ) -def test_update_instance_rest(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["instance"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "id": "id_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "machine_type": "machine_type_value", - "state": 1, - "hyperthreading_enabled": True, - "labels": {}, - "luns": [ - { - "name": "name_value", - "id": "id_value", - "state": 1, - "size_gb": 739, - 
"multiprotocol_type": 1, - "storage_volume": "storage_volume_value", - "shareable": True, - "boot_lun": True, - "storage_type": 1, - "wwid": "wwid_value", - } - ], - "networks": [ - { - "name": "name_value", - "id": "id_value", - "type_": 1, - "ip_address": "ip_address_value", - "mac_address": ["mac_address_value1", "mac_address_value2"], - "state": 1, - "vlan_id": "vlan_id_value", - "cidr": "cidr_value", - "vrf": { - "name": "name_value", - "state": 1, - "qos_policy": {"bandwidth_gbps": 0.1472}, - "vlan_attachments": [ - { - "peer_vlan_id": 1256, - "peer_ip": "peer_ip_value", - "router_ip": "router_ip_value", - } - ], - }, - "labels": {}, - "services_cidr": "services_cidr_value", - "reservations": [ - { - "start_address": "start_address_value", - "end_address": "end_address_value", - "note": "note_value", - } - ], - } - ], - "interactive_serial_console_enabled": True, - "os_image": "os_image_value", - "pod": "pod_value", - "network_template": "network_template_value", - "logical_interfaces": [ - { - "logical_network_interfaces": [ - { - "network": "network_value", - "ip_address": "ip_address_value", - "default_gateway": True, - "network_type": 1, - "id": "id_value", - } - ], - "name": "name_value", - "interface_index": 1576, - } - ], +def test_get_volume_snapshot_rest(request_type): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume_snapshot.VolumeSnapshot( + name="name_value", + id="id_value", + description="description_value", + storage_volume="storage_volume_value", + type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = volume_snapshot.VolumeSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_instance(request) + response = client.get_volume_snapshot(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, volume_snapshot.VolumeSnapshot) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.storage_volume == "storage_volume_value" + assert response.type_ == volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC -def test_update_instance_rest_required_fields( - request_type=gcb_instance.UpdateInstanceRequest, +def test_get_volume_snapshot_rest_required_fields( + request_type=volume_snapshot.GetVolumeSnapshotRequest, ): transport_class = transports.BareMetalSolutionRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7274,19 +21425,21 @@ def test_update_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) + ).get_volume_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields 
with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).get_volume_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7295,7 +21448,7 @@ def test_update_instance_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume_snapshot.VolumeSnapshot() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7307,37 +21460,38 @@ def test_update_instance_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = volume_snapshot.VolumeSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_instance(request) + response = client.get_volume_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_instance_rest_unset_required_fields(): +def test_get_volume_snapshot_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("instance",))) + unset_fields = transport.get_volume_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): +def test_get_volume_snapshot_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7350,16 +21504,14 @@ def test_update_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, 
mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_update_instance" + transports.BareMetalSolutionRestInterceptor, "post_get_volume_snapshot" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_update_instance" + transports.BareMetalSolutionRestInterceptor, "pre_get_volume_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcb_instance.UpdateInstanceRequest.pb( - gcb_instance.UpdateInstanceRequest() + pb_message = volume_snapshot.GetVolumeSnapshotRequest.pb( + volume_snapshot.GetVolumeSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -7371,19 +21523,19 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = volume_snapshot.VolumeSnapshot.to_json( + volume_snapshot.VolumeSnapshot() ) - request = gcb_instance.UpdateInstanceRequest() + request = volume_snapshot.GetVolumeSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = volume_snapshot.VolumeSnapshot() - client.update_instance( + client.get_volume_snapshot( request, metadata=[ ("key", "val"), @@ -7395,8 +21547,8 @@ def test_update_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_instance_rest_bad_request( - transport: str = "rest", request_type=gcb_instance.UpdateInstanceRequest +def test_get_volume_snapshot_rest_bad_request( + transport: str = "rest", request_type=volume_snapshot.GetVolumeSnapshotRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7405,83 +21557,7 @@ def 
test_update_instance_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["instance"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "id": "id_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "machine_type": "machine_type_value", - "state": 1, - "hyperthreading_enabled": True, - "labels": {}, - "luns": [ - { - "name": "name_value", - "id": "id_value", - "state": 1, - "size_gb": 739, - "multiprotocol_type": 1, - "storage_volume": "storage_volume_value", - "shareable": True, - "boot_lun": True, - "storage_type": 1, - "wwid": "wwid_value", - } - ], - "networks": [ - { - "name": "name_value", - "id": "id_value", - "type_": 1, - "ip_address": "ip_address_value", - "mac_address": ["mac_address_value1", "mac_address_value2"], - "state": 1, - "vlan_id": "vlan_id_value", - "cidr": "cidr_value", - "vrf": { - "name": "name_value", - "state": 1, - "qos_policy": {"bandwidth_gbps": 0.1472}, - "vlan_attachments": [ - { - "peer_vlan_id": 1256, - "peer_ip": "peer_ip_value", - "router_ip": "router_ip_value", - } - ], - }, - "labels": {}, - "services_cidr": "services_cidr_value", - "reservations": [ - { - "start_address": "start_address_value", - "end_address": "end_address_value", - "note": "note_value", - } - ], - } - ], - "interactive_serial_console_enabled": True, - "os_image": "os_image_value", - "pod": "pod_value", - "network_template": "network_template_value", - "logical_interfaces": [ - { - "logical_network_interfaces": [ - { - "network": "network_value", - "ip_address": "ip_address_value", - "default_gateway": True, - "network_type": 1, - "id": "id_value", - } - ], - "name": "name_value", - "interface_index": 1576, - } - ], + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" } request = request_type(**request_init) @@ -7494,10 +21570,10 @@ def 
test_update_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_instance(request) + client.get_volume_snapshot(request) -def test_update_instance_rest_flattened(): +def test_get_volume_snapshot_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7506,41 +21582,41 @@ def test_update_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume_snapshot.VolumeSnapshot() # get arguments that satisfy an http rule for this method sample_request = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + "name": "projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4" } # get truthy value for each flattened field mock_args = dict( - instance=gcb_instance.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = volume_snapshot.VolumeSnapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_instance(**mock_args) + client.get_volume_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{instance.name=projects/*/locations/*/instances/*}" + "%s/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}" % client.transport._host, args[1], ) -def test_update_instance_rest_flattened_error(transport: str = "rest"): +def test_get_volume_snapshot_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7549,14 +21625,13 @@ def test_update_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_instance( - gcb_instance.UpdateInstanceRequest(), - instance=gcb_instance.Instance(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_volume_snapshot( + volume_snapshot.GetVolumeSnapshotRequest(), + name="name_value", ) -def test_update_instance_rest_error(): +def test_get_volume_snapshot_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7565,45 +21640,51 @@ def test_update_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - instance.ResetInstanceRequest, + volume_snapshot.ListVolumeSnapshotsRequest, dict, ], ) -def test_reset_instance_rest(request_type): +def test_list_volume_snapshots_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume_snapshot.ListVolumeSnapshotsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = volume_snapshot.ListVolumeSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reset_instance(request) + response = client.list_volume_snapshots(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListVolumeSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_reset_instance_rest_required_fields( - request_type=instance.ResetInstanceRequest, +def test_list_volume_snapshots_rest_required_fields( + request_type=volume_snapshot.ListVolumeSnapshotsRequest, ): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7618,21 +21699,28 @@ def test_reset_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reset_instance._get_unset_required_fields(jsonified_request) + ).list_volume_snapshots._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - 
jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).reset_instance._get_unset_required_fields(jsonified_request) + ).list_volume_snapshots._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7641,7 +21729,7 @@ def test_reset_instance_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume_snapshot.ListVolumeSnapshotsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7653,37 +21741,48 @@ def test_reset_instance_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = volume_snapshot.ListVolumeSnapshotsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.reset_instance(request) + response = client.list_volume_snapshots(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_reset_instance_rest_unset_required_fields(): +def test_list_volume_snapshots_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.reset_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_volume_snapshots._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reset_instance_rest_interceptors(null_interceptor): +def test_list_volume_snapshots_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7696,15 +21795,15 @@ def test_reset_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_reset_instance" + transports.BareMetalSolutionRestInterceptor, "post_list_volume_snapshots" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_reset_instance" + transports.BareMetalSolutionRestInterceptor, "pre_list_volume_snapshots" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = instance.ResetInstanceRequest.pb(instance.ResetInstanceRequest()) + pb_message = volume_snapshot.ListVolumeSnapshotsRequest.pb( + volume_snapshot.ListVolumeSnapshotsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7715,19 +21814,19 @@ def test_reset_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = volume_snapshot.ListVolumeSnapshotsResponse.to_json( + volume_snapshot.ListVolumeSnapshotsResponse() ) - request = instance.ResetInstanceRequest() + request = volume_snapshot.ListVolumeSnapshotsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = volume_snapshot.ListVolumeSnapshotsResponse() - client.reset_instance( + client.list_volume_snapshots( request, metadata=[ ("key", "val"), @@ -7739,8 +21838,8 @@ def test_reset_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_reset_instance_rest_bad_request( - transport: str = "rest", request_type=instance.ResetInstanceRequest +def test_list_volume_snapshots_rest_bad_request( + transport: str = "rest", request_type=volume_snapshot.ListVolumeSnapshotsRequest ): client = BareMetalSolutionClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7748,7 +21847,7 @@ def test_reset_instance_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7760,10 +21859,10 @@ def test_reset_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.reset_instance(request) + client.list_volume_snapshots(request) -def test_reset_instance_rest_flattened(): +def test_list_volume_snapshots_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7772,98 +21871,182 @@ def test_reset_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = volume_snapshot.ListVolumeSnapshotsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" + "parent": "projects/sample1/locations/sample2/volumes/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = volume_snapshot.ListVolumeSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.reset_instance(**mock_args) + client.list_volume_snapshots(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/instances/*}:reset" + "%s/v2/{parent=projects/*/locations/*/volumes/*}/snapshots" % client.transport._host, args[1], ) -def test_reset_instance_rest_flattened_error(transport: str = "rest"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_list_volume_snapshots_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_volume_snapshots( + volume_snapshot.ListVolumeSnapshotsRequest(), + parent="parent_value", + ) + + +def test_list_volume_snapshots_rest_pager(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="abc", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[], + next_page_token="def", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + ], + next_page_token="ghi", + ), + volume_snapshot.ListVolumeSnapshotsResponse( + volume_snapshots=[ + volume_snapshot.VolumeSnapshot(), + volume_snapshot.VolumeSnapshot(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + volume_snapshot.ListVolumeSnapshotsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.reset_instance( - instance.ResetInstanceRequest(), - name="name_value", - ) + pager = client.list_volume_snapshots(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, volume_snapshot.VolumeSnapshot) for i in results) -def test_reset_instance_rest_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_volume_snapshots(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - instance.StartInstanceRequest, + lun.GetLunRequest, dict, ], ) -def test_start_instance_rest(request_type): +def test_get_lun_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = lun.Lun( + name="name_value", + id="id_value", + state=lun.Lun.State.CREATING, + size_gb=739, + multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, + storage_volume="storage_volume_value", + shareable=True, + boot_lun=True, + storage_type=lun.Lun.StorageType.SSD, + wwid="wwid_value", + instances=["instances_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = lun.Lun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.start_instance(request) + response = client.get_lun(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, lun.Lun) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.state == lun.Lun.State.CREATING + assert response.size_gb == 739 + assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX + assert response.storage_volume == "storage_volume_value" + assert response.shareable is True + assert response.boot_lun is True + assert response.storage_type == lun.Lun.StorageType.SSD + assert response.wwid == "wwid_value" + assert response.instances == ["instances_value"] -def test_start_instance_rest_required_fields( - request_type=instance.StartInstanceRequest, -): +def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): transport_class = transports.BareMetalSolutionRestTransport request_init = {} @@ -7882,7 +22065,7 @@ def test_start_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).start_instance._get_unset_required_fields(jsonified_request) + 
).get_lun._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7891,7 +22074,7 @@ def test_start_instance_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).start_instance._get_unset_required_fields(jsonified_request) + ).get_lun._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7905,7 +22088,7 @@ def test_start_instance_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = lun.Lun() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7917,37 +22100,38 @@ def test_start_instance_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = lun.Lun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.start_instance(request) + response = client.get_lun(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_start_instance_rest_unset_required_fields(): +def test_get_lun_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.start_instance._get_unset_required_fields({}) + unset_fields = transport.get_lun._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_start_instance_rest_interceptors(null_interceptor): +def test_get_lun_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7960,15 +22144,13 @@ def test_start_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_start_instance" + transports.BareMetalSolutionRestInterceptor, "post_get_lun" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_start_instance" + transports.BareMetalSolutionRestInterceptor, "pre_get_lun" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = instance.StartInstanceRequest.pb(instance.StartInstanceRequest()) + pb_message = lun.GetLunRequest.pb(lun.GetLunRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7979,19 +22161,17 @@ def test_start_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = lun.Lun.to_json(lun.Lun()) - request = instance.StartInstanceRequest() + request = lun.GetLunRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = lun.Lun() - client.start_instance( + client.get_lun( request, 
metadata=[ ("key", "val"), @@ -8003,8 +22183,8 @@ def test_start_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_start_instance_rest_bad_request( - transport: str = "rest", request_type=instance.StartInstanceRequest +def test_get_lun_rest_bad_request( + transport: str = "rest", request_type=lun.GetLunRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8012,7 +22192,9 @@ def test_start_instance_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8024,10 +22206,10 @@ def test_start_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.start_instance(request) + client.get_lun(request) -def test_start_instance_rest_flattened(): +def test_get_lun_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8036,11 +22218,11 @@ def test_start_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = lun.Lun() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" } # get truthy value for each flattened field @@ -8052,24 +22234,25 @@ def test_start_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = lun.Lun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.start_instance(**mock_args) + client.get_lun(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/instances/*}:start" + "%s/v2/{name=projects/*/locations/*/volumes/*/luns/*}" % client.transport._host, args[1], ) -def test_start_instance_rest_flattened_error(transport: str = "rest"): +def test_get_lun_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8078,13 +22261,13 @@ def test_start_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.start_instance( - instance.StartInstanceRequest(), + client.get_lun( + lun.GetLunRequest(), name="name_value", ) -def test_start_instance_rest_error(): +def test_get_lun_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8093,43 +22276,49 @@ def test_start_instance_rest_error(): @pytest.mark.parametrize( "request_type", [ - instance.StopInstanceRequest, + lun.ListLunsRequest, dict, ], ) -def test_stop_instance_rest(request_type): +def test_list_luns_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = lun.ListLunsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = lun.ListLunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.stop_instance(request) + response = client.list_luns(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListLunsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_stop_instance_rest_required_fields(request_type=instance.StopInstanceRequest): +def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8144,21 +22333,28 @@ def test_stop_instance_rest_required_fields(request_type=instance.StopInstanceRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).stop_instance._get_unset_required_fields(jsonified_request) + ).list_luns._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).stop_instance._get_unset_required_fields(jsonified_request) + ).list_luns._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8167,7 +22363,7 @@ def test_stop_instance_rest_required_fields(request_type=instance.StopInstanceRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = lun.ListLunsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8179,37 +22375,46 @@ def test_stop_instance_rest_required_fields(request_type=instance.StopInstanceRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = lun.ListLunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.stop_instance(request) + response = client.list_luns(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_stop_instance_rest_unset_required_fields(): +def test_list_luns_rest_unset_required_fields(): transport = 
transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.stop_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_luns._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_stop_instance_rest_interceptors(null_interceptor): +def test_list_luns_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8222,15 +22427,13 @@ def test_stop_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_stop_instance" + transports.BareMetalSolutionRestInterceptor, "post_list_luns" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_stop_instance" + transports.BareMetalSolutionRestInterceptor, "pre_list_luns" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = instance.StopInstanceRequest.pb(instance.StopInstanceRequest()) + pb_message = lun.ListLunsRequest.pb(lun.ListLunsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8241,19 +22444,17 @@ def test_stop_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = lun.ListLunsResponse.to_json(lun.ListLunsResponse()) - request = instance.StopInstanceRequest() + request = lun.ListLunsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), 
] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = lun.ListLunsResponse() - client.stop_instance( + client.list_luns( request, metadata=[ ("key", "val"), @@ -8265,8 +22466,8 @@ def test_stop_instance_rest_interceptors(null_interceptor): post.assert_called_once() -def test_stop_instance_rest_bad_request( - transport: str = "rest", request_type=instance.StopInstanceRequest +def test_list_luns_rest_bad_request( + transport: str = "rest", request_type=lun.ListLunsRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8274,7 +22475,7 @@ def test_stop_instance_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8286,10 +22487,10 @@ def test_stop_instance_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.stop_instance(request) + client.list_luns(request) -def test_stop_instance_rest_flattened(): +def test_list_luns_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8298,40 +22499,41 @@ def test_stop_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = lun.ListLunsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/instances/sample3" + "parent": "projects/sample1/locations/sample2/volumes/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = lun.ListLunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.stop_instance(**mock_args) + client.list_luns(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/instances/*}:stop" + "%s/v2/{parent=projects/*/locations/*/volumes/*}/luns" % client.transport._host, args[1], ) -def test_stop_instance_rest_flattened_error(transport: str = "rest"): +def test_list_luns_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8340,33 +22542,92 @@ def test_stop_instance_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.stop_instance( - instance.StopInstanceRequest(), - name="name_value", + client.list_luns( + lun.ListLunsRequest(), + parent="parent_value", ) -def test_stop_instance_rest_error(): +def test_list_luns_rest_pager(transport: str = "rest"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + lun.Lun(), + ], + next_page_token="abc", + ), + lun.ListLunsResponse( + luns=[], + next_page_token="def", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + ], + next_page_token="ghi", + ), + lun.ListLunsResponse( + luns=[ + lun.Lun(), + lun.Lun(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(lun.ListLunsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/volumes/sample3" + } + + pager = client.list_luns(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, lun.Lun) for i in results) + + pages = list(client.list_luns(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ 
- gcb_instance.DetachLunRequest, + lun.EvictLunRequest, dict, ], ) -def test_detach_lun_rest(request_type): +def test_evict_lun_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8381,18 +22642,17 @@ def test_detach_lun_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.detach_lun(request) + response = client.evict_lun(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_detach_lun_rest_required_fields(request_type=gcb_instance.DetachLunRequest): +def test_evict_lun_rest_required_fields(request_type=lun.EvictLunRequest): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["instance"] = "" - request_init["lun"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8407,24 +22667,21 @@ def test_detach_lun_rest_required_fields(request_type=gcb_instance.DetachLunRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).detach_lun._get_unset_required_fields(jsonified_request) + ).evict_lun._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["instance"] = "instance_value" - jsonified_request["lun"] = "lun_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( 
credentials=ga_credentials.AnonymousCredentials() - ).detach_lun._get_unset_required_fields(jsonified_request) + ).evict_lun._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "instance" in jsonified_request - assert jsonified_request["instance"] == "instance_value" - assert "lun" in jsonified_request - assert jsonified_request["lun"] == "lun_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8458,32 +22715,24 @@ def test_detach_lun_rest_required_fields(request_type=gcb_instance.DetachLunRequ response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.detach_lun(request) + response = client.evict_lun(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_detach_lun_rest_unset_required_fields(): +def test_evict_lun_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.detach_lun._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "instance", - "lun", - ) - ) - ) + unset_fields = transport.evict_lun._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_detach_lun_rest_interceptors(null_interceptor): +def test_evict_lun_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8498,13 +22747,13 @@ def test_detach_lun_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, 
"_set_result_from_operation" ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_detach_lun" + transports.BareMetalSolutionRestInterceptor, "post_evict_lun" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_detach_lun" + transports.BareMetalSolutionRestInterceptor, "pre_evict_lun" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcb_instance.DetachLunRequest.pb(gcb_instance.DetachLunRequest()) + pb_message = lun.EvictLunRequest.pb(lun.EvictLunRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8519,7 +22768,7 @@ def test_detach_lun_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcb_instance.DetachLunRequest() + request = lun.EvictLunRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -8527,7 +22776,7 @@ def test_detach_lun_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.detach_lun( + client.evict_lun( request, metadata=[ ("key", "val"), @@ -8539,8 +22788,8 @@ def test_detach_lun_rest_interceptors(null_interceptor): post.assert_called_once() -def test_detach_lun_rest_bad_request( - transport: str = "rest", request_type=gcb_instance.DetachLunRequest +def test_evict_lun_rest_bad_request( + transport: str = "rest", request_type=lun.EvictLunRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8548,7 +22797,9 @@ def test_detach_lun_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"instance": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8560,10 +22811,10 @@ def test_detach_lun_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.detach_lun(request) + client.evict_lun(request) -def test_detach_lun_rest_flattened(): +def test_evict_lun_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8576,13 +22827,12 @@ def test_detach_lun_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "instance": "projects/sample1/locations/sample2/instances/sample3" + "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" } # get truthy value for each flattened field mock_args = dict( - instance="instance_value", - lun="lun_value", + name="name_value", ) mock_args.update(sample_request) @@ -8593,20 +22843,20 @@ def test_detach_lun_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.detach_lun(**mock_args) + client.evict_lun(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{instance=projects/*/locations/*/instances/*}:detachLun" + "%s/v2/{name=projects/*/locations/*/volumes/*/luns/*}:evict" % client.transport._host, args[1], ) -def test_detach_lun_rest_flattened_error(transport: str = "rest"): +def test_evict_lun_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8615,14 +22865,13 @@ def test_detach_lun_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.detach_lun( - gcb_instance.DetachLunRequest(), - instance="instance_value", - lun="lun_value", + client.evict_lun( + lun.EvictLunRequest(), + name="name_value", ) -def test_detach_lun_rest_error(): +def test_evict_lun_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8631,49 +22880,59 @@ def test_detach_lun_rest_error(): @pytest.mark.parametrize( "request_type", [ - volume.ListVolumesRequest, + nfs_share.GetNfsShareRequest, dict, ], ) -def test_list_volumes_rest(request_type): +def test_get_nfs_share_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = volume.ListVolumesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = nfs_share.NfsShare( + name="name_value", + nfs_share_id="nfs_share_id_value", + id="id_value", + state=nfs_share.NfsShare.State.PROVISIONED, + volume="volume_value", + requested_size_gib=1917, + storage_type=nfs_share.NfsShare.StorageType.SSD, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = volume.ListVolumesResponse.pb(return_value) + pb_return_value = nfs_share.NfsShare.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_volumes(request) + response = client.get_nfs_share(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVolumesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, nfs_share.NfsShare) + assert response.name == "name_value" + assert response.nfs_share_id == "nfs_share_id_value" + assert response.id == "id_value" + assert response.state == nfs_share.NfsShare.State.PROVISIONED + assert response.volume == "volume_value" + assert response.requested_size_gib == 1917 + assert response.storage_type == nfs_share.NfsShare.StorageType.SSD -def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): +def test_get_nfs_share_rest_required_fields(request_type=nfs_share.GetNfsShareRequest): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8688,29 +22947,21 @@ def 
test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_volumes._get_unset_required_fields(jsonified_request) + ).get_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_volumes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).get_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8719,7 +22970,7 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() + return_value = nfs_share.NfsShare() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8739,39 +22990,30 @@ def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesReques response_value = Response() response_value.status_code = 200 - pb_return_value = volume.ListVolumesResponse.pb(return_value) + pb_return_value = nfs_share.NfsShare.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_volumes(request) + response = client.get_nfs_share(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_volumes_rest_unset_required_fields(): +def test_get_nfs_share_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_volumes._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_nfs_share._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_volumes_rest_interceptors(null_interceptor): +def test_get_nfs_share_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8784,13 +23026,13 @@ def test_list_volumes_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_list_volumes" + transports.BareMetalSolutionRestInterceptor, "post_get_nfs_share" ) as post, mock.patch.object( - 
transports.BareMetalSolutionRestInterceptor, "pre_list_volumes" + transports.BareMetalSolutionRestInterceptor, "pre_get_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = volume.ListVolumesRequest.pb(volume.ListVolumesRequest()) + pb_message = nfs_share.GetNfsShareRequest.pb(nfs_share.GetNfsShareRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8801,19 +23043,17 @@ def test_list_volumes_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = volume.ListVolumesResponse.to_json( - volume.ListVolumesResponse() - ) + req.return_value._content = nfs_share.NfsShare.to_json(nfs_share.NfsShare()) - request = volume.ListVolumesRequest() + request = nfs_share.GetNfsShareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = volume.ListVolumesResponse() + post.return_value = nfs_share.NfsShare() - client.list_volumes( + client.get_nfs_share( request, metadata=[ ("key", "val"), @@ -8825,8 +23065,8 @@ def test_list_volumes_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_volumes_rest_bad_request( - transport: str = "rest", request_type=volume.ListVolumesRequest +def test_get_nfs_share_rest_bad_request( + transport: str = "rest", request_type=nfs_share.GetNfsShareRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8834,7 +23074,7 @@ def test_list_volumes_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -8846,10 +23086,10 @@ def test_list_volumes_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_volumes(request) + client.get_nfs_share(request) -def test_list_volumes_rest_flattened(): +def test_get_nfs_share_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8858,38 +23098,40 @@ def test_list_volumes_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() + return_value = nfs_share.NfsShare() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/nfsShares/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = volume.ListVolumesResponse.pb(return_value) + pb_return_value = nfs_share.NfsShare.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_volumes(**mock_args) + client.get_nfs_share(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{parent=projects/*/locations/*}/volumes" % client.transport._host, + "%s/v2/{name=projects/*/locations/*/nfsShares/*}" % client.transport._host, args[1], ) -def test_list_volumes_rest_flattened_error(transport: str = "rest"): +def test_get_nfs_share_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8898,142 +23140,66 @@ def test_list_volumes_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_volumes( - volume.ListVolumesRequest(), - parent="parent_value", + client.get_nfs_share( + nfs_share.GetNfsShareRequest(), + name="name_value", ) -def test_list_volumes_rest_pager(transport: str = "rest"): +def test_get_nfs_share_rest_error(): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token="abc", - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token="def", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token="ghi", - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_volumes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, volume.Volume) for i in results) - - pages = list(client.list_volumes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - volume.GetVolumeRequest, + nfs_share.ListNfsSharesRequest, dict, ], ) -def test_get_volume_rest(request_type): +def test_list_nfs_shares_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = volume.Volume( - name="name_value", - id="id_value", - storage_type=volume.Volume.StorageType.SSD, - state=volume.Volume.State.CREATING, - requested_size_gib=1917, - current_size_gib=1710, - emergency_size_gib=1898, - auto_grown_size_gib=2032, - remaining_space_gib=1974, - snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, - snapshot_enabled=True, - pod="pod_value", + return_value = nfs_share.ListNfsSharesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = volume.Volume.pb(return_value) + pb_return_value = nfs_share.ListNfsSharesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_volume(request) + response = client.list_nfs_shares(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, volume.Volume) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.storage_type == volume.Volume.StorageType.SSD - assert response.state == volume.Volume.State.CREATING - assert response.requested_size_gib == 1917 - assert response.current_size_gib == 1710 - assert response.emergency_size_gib == 1898 - assert response.auto_grown_size_gib == 2032 - assert response.remaining_space_gib == 1974 - assert ( - response.snapshot_auto_delete_behavior - == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED - ) - assert response.snapshot_enabled is True - assert response.pod == "pod_value" + assert isinstance(response, pagers.ListNfsSharesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): +def test_list_nfs_shares_rest_required_fields( + request_type=nfs_share.ListNfsSharesRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9048,21 +23214,29 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_volume._get_unset_required_fields(jsonified_request) + ).list_nfs_shares._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_volume._get_unset_required_fields(jsonified_request) + ).list_nfs_shares._get_unset_required_fields(jsonified_request) + # Check that path parameters 
and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9071,7 +23245,7 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = volume.Volume() + return_value = nfs_share.ListNfsSharesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9091,30 +23265,39 @@ def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = volume.Volume.pb(return_value) + pb_return_value = nfs_share.ListNfsSharesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_volume(request) + response = client.list_nfs_shares(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_volume_rest_unset_required_fields(): +def test_list_nfs_shares_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_nfs_shares._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_volume_rest_interceptors(null_interceptor): +def test_list_nfs_shares_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9127,13 +23310,13 @@ def test_get_volume_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_get_volume" + transports.BareMetalSolutionRestInterceptor, "post_list_nfs_shares" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_get_volume" + transports.BareMetalSolutionRestInterceptor, "pre_list_nfs_shares" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = volume.GetVolumeRequest.pb(volume.GetVolumeRequest()) + pb_message = nfs_share.ListNfsSharesRequest.pb(nfs_share.ListNfsSharesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9144,17 +23327,19 @@ def test_get_volume_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = volume.Volume.to_json(volume.Volume()) + req.return_value._content = nfs_share.ListNfsSharesResponse.to_json( + nfs_share.ListNfsSharesResponse() + ) - request = volume.GetVolumeRequest() + request = nfs_share.ListNfsSharesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = volume.Volume() + post.return_value = nfs_share.ListNfsSharesResponse() - client.get_volume( + client.list_nfs_shares( request, metadata=[ ("key", "val"), @@ -9166,8 +23351,8 @@ def 
test_get_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_volume_rest_bad_request( - transport: str = "rest", request_type=volume.GetVolumeRequest +def test_list_nfs_shares_rest_bad_request( + transport: str = "rest", request_type=nfs_share.ListNfsSharesRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9175,7 +23360,7 @@ def test_get_volume_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9187,10 +23372,10 @@ def test_get_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_volume(request) + client.list_nfs_shares(request) -def test_get_volume_rest_flattened(): +def test_list_nfs_shares_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9199,66 +23384,121 @@ def test_get_volume_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = volume.Volume() + return_value = nfs_share.ListNfsSharesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/volumes/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = volume.Volume.pb(return_value) + pb_return_value = nfs_share.ListNfsSharesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_volume(**mock_args) + client.list_nfs_shares(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/volumes/*}" % client.transport._host, + "%s/v2/{parent=projects/*/locations/*}/nfsShares" % client.transport._host, args[1], ) -def test_get_volume_rest_flattened_error(transport: str = "rest"): +def test_list_nfs_shares_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_nfs_shares( + nfs_share.ListNfsSharesRequest(), + parent="parent_value", + ) + + +def test_list_nfs_shares_rest_pager(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_volume( - volume.GetVolumeRequest(), - name="name_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + next_page_token="abc", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[], + next_page_token="def", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + ], + next_page_token="ghi", + ), + nfs_share.ListNfsSharesResponse( + nfs_shares=[ + nfs_share.NfsShare(), + nfs_share.NfsShare(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(nfs_share.ListNfsSharesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + pager = client.list_nfs_shares(request=sample_request) -def test_get_volume_rest_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, nfs_share.NfsShare) for i in results) + + pages = list(client.list_nfs_shares(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - gcb_volume.UpdateVolumeRequest, + 
gcb_nfs_share.UpdateNfsShareRequest, dict, ], ) -def test_update_volume_rest(request_type): +def test_update_nfs_share_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9266,28 +23506,29 @@ def test_update_volume_rest(request_type): # send a request that will satisfy transcoding request_init = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + "nfs_share": {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} } - request_init["volume"] = { - "name": "projects/sample1/locations/sample2/volumes/sample3", + request_init["nfs_share"] = { + "name": "projects/sample1/locations/sample2/nfsShares/sample3", + "nfs_share_id": "nfs_share_id_value", "id": "id_value", - "storage_type": 1, "state": 1, - "requested_size_gib": 1917, - "current_size_gib": 1710, - "emergency_size_gib": 1898, - "auto_grown_size_gib": 2032, - "remaining_space_gib": 1974, - "snapshot_reservation_detail": { - "reserved_space_gib": 1884, - "reserved_space_used_percent": 2859, - "reserved_space_remaining_gib": 2933, - "reserved_space_percent": 2331, - }, - "snapshot_auto_delete_behavior": 1, + "volume": "volume_value", + "allowed_clients": [ + { + "network": "network_value", + "share_ip": "share_ip_value", + "allowed_clients_cidr": "allowed_clients_cidr_value", + "mount_permissions": 1, + "allow_dev": True, + "allow_suid": True, + "no_root_squash": True, + "nfs_path": "nfs_path_value", + } + ], "labels": {}, - "snapshot_enabled": True, - "pod": "pod_value", + "requested_size_gib": 1917, + "storage_type": 1, } request = request_type(**request_init) @@ -9303,14 +23544,14 @@ def test_update_volume_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_volume(request) + response = client.update_nfs_share(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_volume_rest_required_fields( - request_type=gcb_volume.UpdateVolumeRequest, +def test_update_nfs_share_rest_required_fields( + request_type=gcb_nfs_share.UpdateNfsShareRequest, ): transport_class = transports.BareMetalSolutionRestTransport @@ -9329,14 +23570,14 @@ def test_update_volume_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_volume._get_unset_required_fields(jsonified_request) + ).update_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_volume._get_unset_required_fields(jsonified_request) + ).update_nfs_share._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) @@ -9375,24 +23616,24 @@ def test_update_volume_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_volume(request) + response = client.update_nfs_share(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_volume_rest_unset_required_fields(): +def test_update_nfs_share_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("volume",))) + unset_fields = transport.update_nfs_share._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("nfsShare",))) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_volume_rest_interceptors(null_interceptor): +def test_update_nfs_share_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9407,13 +23648,15 @@ def test_update_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_update_volume" + transports.BareMetalSolutionRestInterceptor, "post_update_nfs_share" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_update_volume" + transports.BareMetalSolutionRestInterceptor, "pre_update_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcb_volume.UpdateVolumeRequest.pb(gcb_volume.UpdateVolumeRequest()) + pb_message = gcb_nfs_share.UpdateNfsShareRequest.pb( + gcb_nfs_share.UpdateNfsShareRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9428,7 +23671,7 @@ def test_update_volume_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcb_volume.UpdateVolumeRequest() + request = gcb_nfs_share.UpdateNfsShareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9436,7 +23679,7 @@ def test_update_volume_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_volume( + client.update_nfs_share( request, metadata=[ ("key", "val"), @@ -9448,8 +23691,8 @@ def test_update_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_volume_rest_bad_request( - transport: str = "rest", request_type=gcb_volume.UpdateVolumeRequest +def test_update_nfs_share_rest_bad_request( + transport: str = "rest", request_type=gcb_nfs_share.UpdateNfsShareRequest ): client = 
BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9458,28 +23701,29 @@ def test_update_volume_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + "nfs_share": {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} } - request_init["volume"] = { - "name": "projects/sample1/locations/sample2/volumes/sample3", + request_init["nfs_share"] = { + "name": "projects/sample1/locations/sample2/nfsShares/sample3", + "nfs_share_id": "nfs_share_id_value", "id": "id_value", - "storage_type": 1, "state": 1, - "requested_size_gib": 1917, - "current_size_gib": 1710, - "emergency_size_gib": 1898, - "auto_grown_size_gib": 2032, - "remaining_space_gib": 1974, - "snapshot_reservation_detail": { - "reserved_space_gib": 1884, - "reserved_space_used_percent": 2859, - "reserved_space_remaining_gib": 2933, - "reserved_space_percent": 2331, - }, - "snapshot_auto_delete_behavior": 1, + "volume": "volume_value", + "allowed_clients": [ + { + "network": "network_value", + "share_ip": "share_ip_value", + "allowed_clients_cidr": "allowed_clients_cidr_value", + "mount_permissions": 1, + "allow_dev": True, + "allow_suid": True, + "no_root_squash": True, + "nfs_path": "nfs_path_value", + } + ], "labels": {}, - "snapshot_enabled": True, - "pod": "pod_value", + "requested_size_gib": 1917, + "storage_type": 1, } request = request_type(**request_init) @@ -9492,10 +23736,10 @@ def test_update_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_volume(request) + client.update_nfs_share(request) -def test_update_volume_rest_flattened(): +def test_update_nfs_share_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9508,12 +23752,14 @@ def test_update_volume_rest_flattened(): # get arguments that 
satisfy an http rule for this method sample_request = { - "volume": {"name": "projects/sample1/locations/sample2/volumes/sample3"} + "nfs_share": { + "name": "projects/sample1/locations/sample2/nfsShares/sample3" + } } # get truthy value for each flattened field mock_args = dict( - volume=gcb_volume.Volume(name="name_value"), + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -9525,20 +23771,20 @@ def test_update_volume_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_volume(**mock_args) + client.update_nfs_share(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{volume.name=projects/*/locations/*/volumes/*}" + "%s/v2/{nfs_share.name=projects/*/locations/*/nfsShares/*}" % client.transport._host, args[1], ) -def test_update_volume_rest_flattened_error(transport: str = "rest"): +def test_update_nfs_share_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9547,14 +23793,14 @@ def test_update_volume_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_volume( - gcb_volume.UpdateVolumeRequest(), - volume=gcb_volume.Volume(name="name_value"), + client.update_nfs_share( + gcb_nfs_share.UpdateNfsShareRequest(), + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_volume_rest_error(): +def test_update_nfs_share_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9563,18 +23809,40 @@ def test_update_volume_rest_error(): @pytest.mark.parametrize( "request_type", [ - gcb_volume.ResizeVolumeRequest, + gcb_nfs_share.CreateNfsShareRequest, dict, ], ) -def test_resize_volume_rest(request_type): +def test_create_nfs_share_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"volume": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["nfs_share"] = { + "name": "name_value", + "nfs_share_id": "nfs_share_id_value", + "id": "id_value", + "state": 1, + "volume": "volume_value", + "allowed_clients": [ + { + "network": "network_value", + "share_ip": "share_ip_value", + "allowed_clients_cidr": "allowed_clients_cidr_value", + "mount_permissions": 1, + "allow_dev": True, + "allow_suid": True, + "no_root_squash": True, + "nfs_path": "nfs_path_value", + } + ], + "labels": {}, + "requested_size_gib": 1917, + "storage_type": 1, + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9589,19 +23857,19 @@ def test_resize_volume_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.resize_volume(request) + response = client.create_nfs_share(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_resize_volume_rest_required_fields( - request_type=gcb_volume.ResizeVolumeRequest, +def test_create_nfs_share_rest_required_fields( + request_type=gcb_nfs_share.CreateNfsShareRequest, ): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["volume"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9616,21 +23884,21 @@ def test_resize_volume_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).resize_volume._get_unset_required_fields(jsonified_request) + ).create_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["volume"] = "volume_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).resize_volume._get_unset_required_fields(jsonified_request) + ).create_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "volume" in jsonified_request - assert jsonified_request["volume"] == "volume_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9664,24 +23932,32 @@ def test_resize_volume_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.resize_volume(request) + response = client.create_nfs_share(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_resize_volume_rest_unset_required_fields(): +def test_create_nfs_share_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.resize_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("volume",))) + unset_fields = transport.create_nfs_share._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "nfsShare", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_resize_volume_rest_interceptors(null_interceptor): +def test_create_nfs_share_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9696,13 +23972,15 @@ def test_resize_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_resize_volume" + transports.BareMetalSolutionRestInterceptor, "post_create_nfs_share" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_resize_volume" + transports.BareMetalSolutionRestInterceptor, "pre_create_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcb_volume.ResizeVolumeRequest.pb(gcb_volume.ResizeVolumeRequest()) + pb_message = gcb_nfs_share.CreateNfsShareRequest.pb( + gcb_nfs_share.CreateNfsShareRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9717,7 +23995,7 @@ def 
test_resize_volume_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = gcb_volume.ResizeVolumeRequest() + request = gcb_nfs_share.CreateNfsShareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9725,7 +24003,7 @@ def test_resize_volume_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.resize_volume( + client.create_nfs_share( request, metadata=[ ("key", "val"), @@ -9737,8 +24015,8 @@ def test_resize_volume_rest_interceptors(null_interceptor): post.assert_called_once() -def test_resize_volume_rest_bad_request( - transport: str = "rest", request_type=gcb_volume.ResizeVolumeRequest +def test_create_nfs_share_rest_bad_request( + transport: str = "rest", request_type=gcb_nfs_share.CreateNfsShareRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9746,7 +24024,29 @@ def test_resize_volume_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"volume": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["nfs_share"] = { + "name": "name_value", + "nfs_share_id": "nfs_share_id_value", + "id": "id_value", + "state": 1, + "volume": "volume_value", + "allowed_clients": [ + { + "network": "network_value", + "share_ip": "share_ip_value", + "allowed_clients_cidr": "allowed_clients_cidr_value", + "mount_permissions": 1, + "allow_dev": True, + "allow_suid": True, + "no_root_squash": True, + "nfs_path": "nfs_path_value", + } + ], + "labels": {}, + "requested_size_gib": 1917, + "storage_type": 1, + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9758,10 +24058,10 @@ def test_resize_volume_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.resize_volume(request) + client.create_nfs_share(request) -def test_resize_volume_rest_flattened(): +def test_create_nfs_share_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9773,14 +24073,12 @@ def test_resize_volume_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "volume": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - volume="volume_value", - size_gib=844, + parent="parent_value", + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), ) mock_args.update(sample_request) @@ -9791,36 +24089,35 @@ def test_resize_volume_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.resize_volume(**mock_args) + client.create_nfs_share(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{volume=projects/*/locations/*/volumes/*}:resize" - % client.transport._host, + "%s/v2/{parent=projects/*/locations/*}/nfsShares" % client.transport._host, args[1], ) -def test_resize_volume_rest_flattened_error(transport: str = "rest"): +def test_create_nfs_share_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.resize_volume( - gcb_volume.ResizeVolumeRequest(), - volume="volume_value", - size_gib=844, + # fields is an error. + with pytest.raises(ValueError): + client.create_nfs_share( + gcb_nfs_share.CreateNfsShareRequest(), + parent="parent_value", + nfs_share=gcb_nfs_share.NfsShare(name="name_value"), ) -def test_resize_volume_rest_error(): +def test_create_nfs_share_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9829,49 +24126,62 @@ def test_resize_volume_rest_error(): @pytest.mark.parametrize( "request_type", [ - network.ListNetworksRequest, + nfs_share.RenameNfsShareRequest, dict, ], ) -def test_list_networks_rest(request_type): +def test_rename_nfs_share_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = network.ListNetworksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = nfs_share.NfsShare( + name="name_value", + nfs_share_id="nfs_share_id_value", + id="id_value", + state=nfs_share.NfsShare.State.PROVISIONED, + volume="volume_value", + requested_size_gib=1917, + storage_type=nfs_share.NfsShare.StorageType.SSD, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = network.ListNetworksResponse.pb(return_value) + pb_return_value = nfs_share.NfsShare.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_networks(request) + response = client.rename_nfs_share(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNetworksPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, nfs_share.NfsShare) + assert response.name == "name_value" + assert response.nfs_share_id == "nfs_share_id_value" + assert response.id == "id_value" + assert response.state == nfs_share.NfsShare.State.PROVISIONED + assert response.volume == "volume_value" + assert response.requested_size_gib == 1917 + assert response.storage_type == nfs_share.NfsShare.StorageType.SSD -def test_list_networks_rest_required_fields(request_type=network.ListNetworksRequest): +def test_rename_nfs_share_rest_required_fields( + request_type=nfs_share.RenameNfsShareRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" + request_init["new_nfsshare_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9886,29 +24196,24 @@ def 
test_list_networks_rest_required_fields(request_type=network.ListNetworksReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_networks._get_unset_required_fields(jsonified_request) + ).rename_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" + jsonified_request["newNfsshareId"] = "new_nfsshare_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_networks._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).rename_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "newNfsshareId" in jsonified_request + assert jsonified_request["newNfsshareId"] == "new_nfsshare_id_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9917,7 +24222,7 @@ def test_list_networks_rest_required_fields(request_type=network.ListNetworksReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = network.ListNetworksResponse() + return_value = nfs_share.NfsShare() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9929,47 +24234,47 @@ def test_list_networks_rest_required_fields(request_type=network.ListNetworksReq pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - pb_return_value = network.ListNetworksResponse.pb(return_value) + pb_return_value = nfs_share.NfsShare.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_networks(request) + response = client.rename_nfs_share(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_networks_rest_unset_required_fields(): +def test_rename_nfs_share_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_networks._get_unset_required_fields({}) + unset_fields = transport.rename_nfs_share._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "filter", - "pageSize", - "pageToken", + "name", + "newNfsshareId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_networks_rest_interceptors(null_interceptor): +def test_rename_nfs_share_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9982,13 +24287,15 @@ def test_list_networks_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_list_networks" + transports.BareMetalSolutionRestInterceptor, "post_rename_nfs_share" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_list_networks" + transports.BareMetalSolutionRestInterceptor, "pre_rename_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = network.ListNetworksRequest.pb(network.ListNetworksRequest()) + pb_message = nfs_share.RenameNfsShareRequest.pb( + nfs_share.RenameNfsShareRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9999,19 +24306,17 @@ def test_list_networks_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = network.ListNetworksResponse.to_json( - network.ListNetworksResponse() - ) + req.return_value._content = nfs_share.NfsShare.to_json(nfs_share.NfsShare()) - request = network.ListNetworksRequest() + request = nfs_share.RenameNfsShareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = network.ListNetworksResponse() + post.return_value = nfs_share.NfsShare() - client.list_networks( + client.rename_nfs_share( request, metadata=[ ("key", "val"), @@ -10023,8 +24328,8 @@ def test_list_networks_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_networks_rest_bad_request( - transport: str = "rest", request_type=network.ListNetworksRequest +def test_rename_nfs_share_rest_bad_request( + transport: str = "rest", request_type=nfs_share.RenameNfsShareRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10032,7 +24337,7 @@ def test_list_networks_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": 
"projects/sample1/locations/sample2/nfsShares/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10044,10 +24349,10 @@ def test_list_networks_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_networks(request) + client.rename_nfs_share(request) -def test_list_networks_rest_flattened(): +def test_rename_nfs_share_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10056,38 +24361,42 @@ def test_list_networks_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = network.ListNetworksResponse() + return_value = nfs_share.NfsShare() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/nfsShares/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = network.ListNetworksResponse.pb(return_value) + pb_return_value = nfs_share.NfsShare.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_networks(**mock_args) + client.rename_nfs_share(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{parent=projects/*/locations/*}/networks" % client.transport._host, + "%s/v2/{name=projects/*/locations/*/nfsShares/*}:rename" + % client.transport._host, args[1], ) -def test_list_networks_rest_flattened_error(transport: str = "rest"): +def test_rename_nfs_share_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10096,116 +24405,61 @@ def test_list_networks_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_networks( - network.ListNetworksRequest(), - parent="parent_value", + client.rename_nfs_share( + nfs_share.RenameNfsShareRequest(), + name="name_value", + new_nfsshare_id="new_nfsshare_id_value", ) -def test_list_networks_rest_pager(transport: str = "rest"): +def test_rename_nfs_share_rest_error(): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token="abc", - ), - network.ListNetworksResponse( - networks=[], - next_page_token="def", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token="ghi", - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(network.ListNetworksResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_networks(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, network.Network) for i in results) - - pages = list(client.list_networks(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - network.ListNetworkUsageRequest, + nfs_share.DeleteNfsShareRequest, dict, ], ) -def test_list_network_usage_rest(request_type): +def test_delete_nfs_share_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} request = request_type(**request_init) # Mock the http request call within the 
method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = network.ListNetworkUsageResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = network.ListNetworkUsageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_network_usage(request) + response = client.delete_nfs_share(request) # Establish that the response is the type that we expect. - assert isinstance(response, network.ListNetworkUsageResponse) + assert response.operation.name == "operations/spam" -def test_list_network_usage_rest_required_fields( - request_type=network.ListNetworkUsageRequest, +def test_delete_nfs_share_rest_required_fields( + request_type=nfs_share.DeleteNfsShareRequest, ): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["location"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10220,21 +24474,21 @@ def test_list_network_usage_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_network_usage._get_unset_required_fields(jsonified_request) + ).delete_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["location"] = "location_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).list_network_usage._get_unset_required_fields(jsonified_request) + ).delete_nfs_share._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10243,7 +24497,7 @@ def test_list_network_usage_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = network.ListNetworkUsageResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10255,38 +24509,36 @@ def test_list_network_usage_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - pb_return_value = network.ListNetworkUsageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_network_usage(request) + response = client.delete_nfs_share(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_network_usage_rest_unset_required_fields(): +def test_delete_nfs_share_rest_unset_required_fields(): transport = 
transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_network_usage._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("location",))) + unset_fields = transport.delete_nfs_share._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_network_usage_rest_interceptors(null_interceptor): +def test_delete_nfs_share_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10299,14 +24551,16 @@ def test_list_network_usage_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_list_network_usage" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_delete_nfs_share" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_list_network_usage" + transports.BareMetalSolutionRestInterceptor, "pre_delete_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = network.ListNetworkUsageRequest.pb( - network.ListNetworkUsageRequest() + pb_message = nfs_share.DeleteNfsShareRequest.pb( + nfs_share.DeleteNfsShareRequest() ) transcode.return_value = { "method": "post", @@ -10318,19 +24572,19 @@ def test_list_network_usage_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = network.ListNetworkUsageResponse.to_json( - network.ListNetworkUsageResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = network.ListNetworkUsageRequest() + request = 
nfs_share.DeleteNfsShareRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = network.ListNetworkUsageResponse() + post.return_value = operations_pb2.Operation() - client.list_network_usage( + client.delete_nfs_share( request, metadata=[ ("key", "val"), @@ -10342,8 +24596,8 @@ def test_list_network_usage_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_network_usage_rest_bad_request( - transport: str = "rest", request_type=network.ListNetworkUsageRequest +def test_delete_nfs_share_rest_bad_request( + transport: str = "rest", request_type=nfs_share.DeleteNfsShareRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10351,7 +24605,7 @@ def test_list_network_usage_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10363,10 +24617,10 @@ def test_list_network_usage_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_network_usage(request) + client.delete_nfs_share(request) -def test_list_network_usage_rest_flattened(): +def test_delete_nfs_share_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10375,39 +24629,39 @@ def test_list_network_usage_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = network.ListNetworkUsageResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/nfsShares/sample3" + } # get truthy value for each flattened field mock_args = dict( - location="location_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = network.ListNetworkUsageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_network_usage(**mock_args) + client.delete_nfs_share(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{location=projects/*/locations/*}/networks:listNetworkUsage" - % client.transport._host, + "%s/v2/{name=projects/*/locations/*/nfsShares/*}" % client.transport._host, args[1], ) -def test_list_network_usage_rest_flattened_error(transport: str = "rest"): +def test_delete_nfs_share_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10416,13 +24670,13 @@ def test_list_network_usage_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_network_usage( - network.ListNetworkUsageRequest(), - location="location_value", + client.delete_nfs_share( + nfs_share.DeleteNfsShareRequest(), + name="name_value", ) -def test_list_network_usage_rest_error(): +def test_delete_nfs_share_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10431,63 +24685,49 @@ def test_list_network_usage_rest_error(): @pytest.mark.parametrize( "request_type", [ - network.GetNetworkRequest, + provisioning.ListProvisioningQuotasRequest, dict, ], ) -def test_get_network_rest(request_type): +def test_list_provisioning_quotas_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/networks/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = network.Network( - name="name_value", - id="id_value", - type_=network.Network.Type.CLIENT, - ip_address="ip_address_value", - mac_address=["mac_address_value"], - state=network.Network.State.PROVISIONING, - vlan_id="vlan_id_value", - cidr="cidr_value", - services_cidr="services_cidr_value", + return_value = provisioning.ListProvisioningQuotasResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = network.Network.pb(return_value) + pb_return_value = provisioning.ListProvisioningQuotasResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_network(request) + response = client.list_provisioning_quotas(request) # Establish that the response is the type that we expect. - assert isinstance(response, network.Network) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == "ip_address_value" - assert response.mac_address == ["mac_address_value"] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == "vlan_id_value" - assert response.cidr == "cidr_value" - assert response.services_cidr == "services_cidr_value" + assert isinstance(response, pagers.ListProvisioningQuotasPager) + assert response.next_page_token == "next_page_token_value" -def test_get_network_rest_required_fields(request_type=network.GetNetworkRequest): +def test_list_provisioning_quotas_rest_required_fields( + request_type=provisioning.ListProvisioningQuotasRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) 
jsonified_request = json.loads( @@ -10502,21 +24742,28 @@ def test_get_network_rest_required_fields(request_type=network.GetNetworkRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_network._get_unset_required_fields(jsonified_request) + ).list_provisioning_quotas._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_network._get_unset_required_fields(jsonified_request) + ).list_provisioning_quotas._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10525,7 +24772,7 @@ def test_get_network_rest_required_fields(request_type=network.GetNetworkRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = network.Network() + return_value = provisioning.ListProvisioningQuotasResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10545,30 +24792,40 @@ def test_get_network_rest_required_fields(request_type=network.GetNetworkRequest response_value = Response() response_value.status_code = 200 - pb_return_value = network.Network.pb(return_value) + pb_return_value = provisioning.ListProvisioningQuotasResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_network(request) + response = client.list_provisioning_quotas(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_network_rest_unset_required_fields(): +def test_list_provisioning_quotas_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_network._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_provisioning_quotas._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_network_rest_interceptors(null_interceptor): +def test_list_provisioning_quotas_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10581,13 +24838,15 @@ def test_get_network_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_get_network" + transports.BareMetalSolutionRestInterceptor, "post_list_provisioning_quotas" ) as post, 
mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_get_network" + transports.BareMetalSolutionRestInterceptor, "pre_list_provisioning_quotas" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = network.GetNetworkRequest.pb(network.GetNetworkRequest()) + pb_message = provisioning.ListProvisioningQuotasRequest.pb( + provisioning.ListProvisioningQuotasRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10598,17 +24857,19 @@ def test_get_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = network.Network.to_json(network.Network()) + req.return_value._content = provisioning.ListProvisioningQuotasResponse.to_json( + provisioning.ListProvisioningQuotasResponse() + ) - request = network.GetNetworkRequest() + request = provisioning.ListProvisioningQuotasRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = network.Network() + post.return_value = provisioning.ListProvisioningQuotasResponse() - client.get_network( + client.list_provisioning_quotas( request, metadata=[ ("key", "val"), @@ -10620,8 +24881,8 @@ def test_get_network_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_network_rest_bad_request( - transport: str = "rest", request_type=network.GetNetworkRequest +def test_list_provisioning_quotas_rest_bad_request( + transport: str = "rest", request_type=provisioning.ListProvisioningQuotasRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10629,7 +24890,7 @@ def test_get_network_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/networks/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # 
Mock the http request call within the method and fake a BadRequest error. @@ -10641,10 +24902,10 @@ def test_get_network_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_network(request) + client.list_provisioning_quotas(request) -def test_get_network_rest_flattened(): +def test_list_provisioning_quotas_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10653,38 +24914,39 @@ def test_get_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = network.Network() + return_value = provisioning.ListProvisioningQuotasResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/networks/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = network.Network.pb(return_value) + pb_return_value = provisioning.ListProvisioningQuotasResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_network(**mock_args) + client.list_provisioning_quotas(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/networks/*}" % client.transport._host, + "%s/v2/{parent=projects/*/locations/*}/provisioningQuotas" + % client.transport._host, args[1], ) -def test_get_network_rest_flattened_error(transport: str = "rest"): +def test_list_provisioning_quotas_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10693,92 +24955,118 @@ def test_get_network_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_network( - network.GetNetworkRequest(), - name="name_value", + client.list_provisioning_quotas( + provisioning.ListProvisioningQuotasRequest(), + parent="parent_value", ) -def test_get_network_rest_error(): +def test_list_provisioning_quotas_rest_pager(transport: str = "rest"): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + next_page_token="abc", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[], + next_page_token="def", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + ], + next_page_token="ghi", + ), + provisioning.ListProvisioningQuotasResponse( + provisioning_quotas=[ + provisioning.ProvisioningQuota(), + provisioning.ProvisioningQuota(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + provisioning.ListProvisioningQuotasResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_provisioning_quotas(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, provisioning.ProvisioningQuota) for i in results) + + pages = list(client.list_provisioning_quotas(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - gcb_network.UpdateNetworkRequest, + provisioning.SubmitProvisioningConfigRequest, dict, ], ) -def test_update_network_rest(request_type): +def test_submit_provisioning_config_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # 
send a request that will satisfy transcoding - request_init = { - "network": {"name": "projects/sample1/locations/sample2/networks/sample3"} - } - request_init["network"] = { - "name": "projects/sample1/locations/sample2/networks/sample3", - "id": "id_value", - "type_": 1, - "ip_address": "ip_address_value", - "mac_address": ["mac_address_value1", "mac_address_value2"], - "state": 1, - "vlan_id": "vlan_id_value", - "cidr": "cidr_value", - "vrf": { - "name": "name_value", - "state": 1, - "qos_policy": {"bandwidth_gbps": 0.1472}, - "vlan_attachments": [ - { - "peer_vlan_id": 1256, - "peer_ip": "peer_ip_value", - "router_ip": "router_ip_value", - } - ], - }, - "labels": {}, - "services_cidr": "services_cidr_value", - "reservations": [ - { - "start_address": "start_address_value", - "end_address": "end_address_value", - "note": "note_value", - } - ], - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = provisioning.SubmitProvisioningConfigResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = provisioning.SubmitProvisioningConfigResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_network(request) + response = client.submit_provisioning_config(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, provisioning.SubmitProvisioningConfigResponse) -def test_update_network_rest_required_fields( - request_type=gcb_network.UpdateNetworkRequest, +def test_submit_provisioning_config_rest_required_fields( + request_type=provisioning.SubmitProvisioningConfigRequest, ): transport_class = transports.BareMetalSolutionRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -10793,19 +25081,21 @@ def test_update_network_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_network._get_unset_required_fields(jsonified_request) + ).submit_provisioning_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_network._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + ).submit_provisioning_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10814,7 +25104,7 @@ def test_update_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = provisioning.SubmitProvisioningConfigResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10826,7 +25116,7 @@ def test_update_network_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -10834,29 +25124,41 @@ def test_update_network_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = provisioning.SubmitProvisioningConfigResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_network(request) + response = client.submit_provisioning_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_network_rest_unset_required_fields(): +def test_submit_provisioning_config_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_network._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("network",))) + unset_fields = transport.submit_provisioning_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "provisioningConfig", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_network_rest_interceptors(null_interceptor): +def 
test_submit_provisioning_config_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10869,16 +25171,14 @@ def test_update_network_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_update_network" + transports.BareMetalSolutionRestInterceptor, "post_submit_provisioning_config" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_update_network" + transports.BareMetalSolutionRestInterceptor, "pre_submit_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcb_network.UpdateNetworkRequest.pb( - gcb_network.UpdateNetworkRequest() + pb_message = provisioning.SubmitProvisioningConfigRequest.pb( + provisioning.SubmitProvisioningConfigRequest() ) transcode.return_value = { "method": "post", @@ -10890,19 +25190,21 @@ def test_update_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + provisioning.SubmitProvisioningConfigResponse.to_json( + provisioning.SubmitProvisioningConfigResponse() + ) ) - request = gcb_network.UpdateNetworkRequest() + request = provisioning.SubmitProvisioningConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = provisioning.SubmitProvisioningConfigResponse() - client.update_network( + client.submit_provisioning_config( request, metadata=[ ("key", "val"), @@ -10914,8 +25216,8 @@ def 
test_update_network_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_network_rest_bad_request( - transport: str = "rest", request_type=gcb_network.UpdateNetworkRequest +def test_submit_provisioning_config_rest_bad_request( + transport: str = "rest", request_type=provisioning.SubmitProvisioningConfigRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10923,40 +25225,7 @@ def test_update_network_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "network": {"name": "projects/sample1/locations/sample2/networks/sample3"} - } - request_init["network"] = { - "name": "projects/sample1/locations/sample2/networks/sample3", - "id": "id_value", - "type_": 1, - "ip_address": "ip_address_value", - "mac_address": ["mac_address_value1", "mac_address_value2"], - "state": 1, - "vlan_id": "vlan_id_value", - "cidr": "cidr_value", - "vrf": { - "name": "name_value", - "state": 1, - "qos_policy": {"bandwidth_gbps": 0.1472}, - "vlan_attachments": [ - { - "peer_vlan_id": 1256, - "peer_ip": "peer_ip_value", - "router_ip": "router_ip_value", - } - ], - }, - "labels": {}, - "services_cidr": "services_cidr_value", - "reservations": [ - { - "start_address": "start_address_value", - "end_address": "end_address_value", - "note": "note_value", - } - ], - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -10968,10 +25237,10 @@ def test_update_network_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_network(request) + client.submit_provisioning_config(request) -def test_update_network_rest_flattened(): +def test_submit_provisioning_config_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10980,41 +25249,40 @@ def test_update_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = provisioning.SubmitProvisioningConfigResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "network": {"name": "projects/sample1/locations/sample2/networks/sample3"} - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - network=gcb_network.Network(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = provisioning.SubmitProvisioningConfigResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_network(**mock_args) + client.submit_provisioning_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{network.name=projects/*/locations/*/networks/*}" + "%s/v2/{parent=projects/*/locations/*}/provisioningConfigs:submit" % client.transport._host, args[1], ) -def test_update_network_rest_flattened_error(transport: str = "rest"): +def test_submit_provisioning_config_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11023,14 +25291,14 @@ def test_update_network_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_network( - gcb_network.UpdateNetworkRequest(), - network=gcb_network.Network(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.submit_provisioning_config( + provisioning.SubmitProvisioningConfigRequest(), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), ) -def test_update_network_rest_error(): +def test_submit_provisioning_config_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11039,11 +25307,11 @@ def test_update_network_rest_error(): @pytest.mark.parametrize( "request_type", [ - lun.GetLunRequest, + provisioning.GetProvisioningConfigRequest, dict, ], ) -def test_get_lun_rest(request_type): +def test_get_provisioning_config_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11051,51 +25319,53 @@ def test_get_lun_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3" } request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = lun.Lun( + return_value = provisioning.ProvisioningConfig( name="name_value", - id="id_value", - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume="storage_volume_value", - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid="wwid_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = lun.Lun.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_lun(request) + response = client.get_provisioning_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, lun.Lun) + assert isinstance(response, provisioning.ProvisioningConfig) assert response.name == "name_value" - assert response.id == "id_value" - assert response.state == lun.Lun.State.CREATING - assert response.size_gb == 739 - assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX - assert response.storage_volume == "storage_volume_value" - assert response.shareable is True - assert response.boot_lun is True - assert response.storage_type == lun.Lun.StorageType.SSD - assert response.wwid == "wwid_value" - - -def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +def test_get_provisioning_config_rest_required_fields( + request_type=provisioning.GetProvisioningConfigRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} @@ -11114,7 +25384,7 @@ def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_lun._get_unset_required_fields(jsonified_request) + ).get_provisioning_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11123,7 +25393,7 @@ def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_lun._get_unset_required_fields(jsonified_request) + 
).get_provisioning_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11137,7 +25407,7 @@ def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = lun.Lun() + return_value = provisioning.ProvisioningConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11157,30 +25427,30 @@ def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = lun.Lun.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_lun(request) + response = client.get_provisioning_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_lun_rest_unset_required_fields(): +def test_get_provisioning_config_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_lun._get_unset_required_fields({}) + unset_fields = transport.get_provisioning_config._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_lun_rest_interceptors(null_interceptor): +def test_get_provisioning_config_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11193,13 +25463,15 @@ def test_get_lun_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_get_lun" + transports.BareMetalSolutionRestInterceptor, "post_get_provisioning_config" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_get_lun" + transports.BareMetalSolutionRestInterceptor, "pre_get_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = lun.GetLunRequest.pb(lun.GetLunRequest()) + pb_message = provisioning.GetProvisioningConfigRequest.pb( + provisioning.GetProvisioningConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11210,17 +25482,19 @@ def test_get_lun_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = lun.Lun.to_json(lun.Lun()) + req.return_value._content = provisioning.ProvisioningConfig.to_json( + provisioning.ProvisioningConfig() + ) - request = lun.GetLunRequest() + request = provisioning.GetProvisioningConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = lun.Lun() + post.return_value = provisioning.ProvisioningConfig() - client.get_lun( + client.get_provisioning_config( request, metadata=[ ("key", "val"), @@ -11232,8 +25506,8 @@ def test_get_lun_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_lun_rest_bad_request( - transport: str = "rest", request_type=lun.GetLunRequest +def test_get_provisioning_config_rest_bad_request( + transport: str = "rest", request_type=provisioning.GetProvisioningConfigRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11242,7 +25516,7 @@ def 
test_get_lun_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3" } request = request_type(**request_init) @@ -11255,10 +25529,10 @@ def test_get_lun_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_lun(request) + client.get_provisioning_config(request) -def test_get_lun_rest_flattened(): +def test_get_provisioning_config_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11267,11 +25541,11 @@ def test_get_lun_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = lun.Lun() + return_value = provisioning.ProvisioningConfig() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/volumes/sample3/luns/sample4" + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3" } # get truthy value for each flattened field @@ -11283,25 +25557,25 @@ def test_get_lun_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = lun.Lun.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_lun(**mock_args) + client.get_provisioning_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/volumes/*/luns/*}" + "%s/v2/{name=projects/*/locations/*/provisioningConfigs/*}" % client.transport._host, args[1], ) -def test_get_lun_rest_flattened_error(transport: str = "rest"): +def test_get_provisioning_config_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11310,13 +25584,13 @@ def test_get_lun_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_lun( - lun.GetLunRequest(), + client.get_provisioning_config( + provisioning.GetProvisioningConfigRequest(), name="name_value", ) -def test_get_lun_rest_error(): +def test_get_provisioning_config_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11325,45 +25599,154 @@ def test_get_lun_rest_error(): @pytest.mark.parametrize( "request_type", [ - lun.ListLunsRequest, + provisioning.CreateProvisioningConfigRequest, dict, ], ) -def test_list_luns_rest(request_type): +def test_create_provisioning_config_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["provisioning_config"] = { + "name": "name_value", + "instances": [ + { + "name": "name_value", + "id": "id_value", + "instance_type": "instance_type_value", + "hyperthreading": True, + "os_image": "os_image_value", + "client_network": { + "network_id": "network_id_value", + "address": "address_value", + "existing_network_id": "existing_network_id_value", + 
}, + "private_network": {}, + "user_note": "user_note_value", + "account_networks_enabled": True, + "network_config": 1, + "network_template": "network_template_value", + "logical_interfaces": [ + { + "logical_network_interfaces": [ + { + "network": "network_value", + "ip_address": "ip_address_value", + "default_gateway": True, + "network_type": 1, + "id": "id_value", + } + ], + "name": "name_value", + "interface_index": 1576, + } + ], + "ssh_key_names": ["ssh_key_names_value1", "ssh_key_names_value2"], + } + ], + "networks": [ + { + "name": "name_value", + "id": "id_value", + "type_": 1, + "bandwidth": 1, + "vlan_attachments": [ + {"id": "id_value", "pairing_key": "pairing_key_value"} + ], + "cidr": "cidr_value", + "service_cidr": 1, + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "vlan_same_project": True, + "jumbo_frames_enabled": True, + } + ], + "volumes": [ + { + "name": "name_value", + "id": "id_value", + "snapshots_enabled": True, + "type_": 1, + "protocol": 1, + "size_gb": 739, + "lun_ranges": [{"quantity": 895, "size_gb": 739}], + "machine_ids": ["machine_ids_value1", "machine_ids_value2"], + "nfs_exports": [ + { + "network_id": "network_id_value", + "machine_id": "machine_id_value", + "cidr": "cidr_value", + "permissions": 1, + "no_root_squash": True, + "allow_suid": True, + "allow_dev": True, + } + ], + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "performance_tier": 1, + } + ], + "ticket_id": "ticket_id_value", + "handover_service_account": "handover_service_account_value", + "email": "email_value", + "state": 1, + "location": "location_value", + "update_time": {"seconds": 751, "nanos": 543}, + "cloud_console_uri": "cloud_console_uri_value", + "vpc_sc_enabled": True, + "status_message": "status_message_value", + "custom_id": "custom_id_value", + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = lun.ListLunsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = provisioning.ProvisioningConfig( + name="name_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = lun.ListLunsResponse.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_luns(request) + response = client.create_provisioning_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLunsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): + assert isinstance(response, provisioning.ProvisioningConfig) + assert response.name == "name_value" + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +def test_create_provisioning_config_rest_required_fields( + request_type=provisioning.CreateProvisioningConfigRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} @@ -11382,7 +25765,7 @@ def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_luns._get_unset_required_fields(jsonified_request) + ).create_provisioning_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11391,14 +25774,9 @@ def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_luns._get_unset_required_fields(jsonified_request) + ).create_provisioning_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("email",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11412,7 +25790,7 @@ def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = lun.ListLunsResponse() + return_value = provisioning.ProvisioningConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11424,46 +25802,47 @@ def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - pb_return_value = lun.ListLunsResponse.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_luns(request) + response = client.create_provisioning_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_luns_rest_unset_required_fields(): +def test_create_provisioning_config_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_luns._get_unset_required_fields({}) + unset_fields = transport.create_provisioning_config._get_unset_required_fields({}) 
assert set(unset_fields) == ( - set( + set(("email",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "provisioningConfig", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_luns_rest_interceptors(null_interceptor): +def test_create_provisioning_config_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11476,13 +25855,15 @@ def test_list_luns_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_list_luns" + transports.BareMetalSolutionRestInterceptor, "post_create_provisioning_config" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_list_luns" + transports.BareMetalSolutionRestInterceptor, "pre_create_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = lun.ListLunsRequest.pb(lun.ListLunsRequest()) + pb_message = provisioning.CreateProvisioningConfigRequest.pb( + provisioning.CreateProvisioningConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11493,17 +25874,19 @@ def test_list_luns_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = lun.ListLunsResponse.to_json(lun.ListLunsResponse()) + req.return_value._content = provisioning.ProvisioningConfig.to_json( + provisioning.ProvisioningConfig() + ) - request = lun.ListLunsRequest() + request = provisioning.CreateProvisioningConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = lun.ListLunsResponse() + post.return_value = provisioning.ProvisioningConfig() - client.list_luns( + client.create_provisioning_config( 
request, metadata=[ ("key", "val"), @@ -11515,8 +25898,8 @@ def test_list_luns_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_luns_rest_bad_request( - transport: str = "rest", request_type=lun.ListLunsRequest +def test_create_provisioning_config_rest_bad_request( + transport: str = "rest", request_type=provisioning.CreateProvisioningConfigRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11524,7 +25907,98 @@ def test_list_luns_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/volumes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["provisioning_config"] = { + "name": "name_value", + "instances": [ + { + "name": "name_value", + "id": "id_value", + "instance_type": "instance_type_value", + "hyperthreading": True, + "os_image": "os_image_value", + "client_network": { + "network_id": "network_id_value", + "address": "address_value", + "existing_network_id": "existing_network_id_value", + }, + "private_network": {}, + "user_note": "user_note_value", + "account_networks_enabled": True, + "network_config": 1, + "network_template": "network_template_value", + "logical_interfaces": [ + { + "logical_network_interfaces": [ + { + "network": "network_value", + "ip_address": "ip_address_value", + "default_gateway": True, + "network_type": 1, + "id": "id_value", + } + ], + "name": "name_value", + "interface_index": 1576, + } + ], + "ssh_key_names": ["ssh_key_names_value1", "ssh_key_names_value2"], + } + ], + "networks": [ + { + "name": "name_value", + "id": "id_value", + "type_": 1, + "bandwidth": 1, + "vlan_attachments": [ + {"id": "id_value", "pairing_key": "pairing_key_value"} + ], + "cidr": "cidr_value", + "service_cidr": 1, + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "vlan_same_project": True, + "jumbo_frames_enabled": True, + } + ], + 
"volumes": [ + { + "name": "name_value", + "id": "id_value", + "snapshots_enabled": True, + "type_": 1, + "protocol": 1, + "size_gb": 739, + "lun_ranges": [{"quantity": 895, "size_gb": 739}], + "machine_ids": ["machine_ids_value1", "machine_ids_value2"], + "nfs_exports": [ + { + "network_id": "network_id_value", + "machine_id": "machine_id_value", + "cidr": "cidr_value", + "permissions": 1, + "no_root_squash": True, + "allow_suid": True, + "allow_dev": True, + } + ], + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "performance_tier": 1, + } + ], + "ticket_id": "ticket_id_value", + "handover_service_account": "handover_service_account_value", + "email": "email_value", + "state": 1, + "location": "location_value", + "update_time": {"seconds": 751, "nanos": 543}, + "cloud_console_uri": "cloud_console_uri_value", + "vpc_sc_enabled": True, + "status_message": "status_message_value", + "custom_id": "custom_id_value", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11536,10 +26010,10 @@ def test_list_luns_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_luns(request) + client.create_provisioning_config(request) -def test_list_luns_rest_flattened(): +def test_create_provisioning_config_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11548,168 +26022,219 @@ def test_list_luns_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = lun.ListLunsResponse() + return_value = provisioning.ProvisioningConfig() # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = lun.ListLunsResponse.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_luns(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2/{parent=projects/*/locations/*/volumes/*}/luns" - % client.transport._host, - args[1], - ) - - -def test_list_luns_rest_flattened_error(transport: str = "rest"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_luns( - lun.ListLunsRequest(), - parent="parent_value", - ) - - -def test_list_luns_rest_pager(transport: str = "rest"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token="abc", - ), - lun.ListLunsResponse( - luns=[], - next_page_token="def", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token="ghi", - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_provisioning_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/provisioningConfigs" + % client.transport._host, + args[1], ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(lun.ListLunsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = { - "parent": "projects/sample1/locations/sample2/volumes/sample3" - } +def test_create_provisioning_config_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_luns(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_provisioning_config( + provisioning.CreateProvisioningConfigRequest(), + parent="parent_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, lun.Lun) for i in results) - pages = list(client.list_luns(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_provisioning_config_rest_error(): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - nfs_share.GetNfsShareRequest, + provisioning.UpdateProvisioningConfigRequest, dict, ], ) -def test_get_nfs_share_rest(request_type): +def test_update_provisioning_config_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} + request_init = { + "provisioning_config": { + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3" + } + } + request_init["provisioning_config"] = { + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3", + "instances": [ + { + "name": "name_value", + "id": "id_value", + "instance_type": "instance_type_value", + "hyperthreading": True, + "os_image": "os_image_value", + "client_network": { + "network_id": "network_id_value", + "address": "address_value", + "existing_network_id": "existing_network_id_value", + }, + "private_network": {}, + "user_note": "user_note_value", + "account_networks_enabled": True, + "network_config": 1, + "network_template": "network_template_value", + "logical_interfaces": [ + { + "logical_network_interfaces": [ + { + "network": "network_value", + "ip_address": "ip_address_value", + 
"default_gateway": True, + "network_type": 1, + "id": "id_value", + } + ], + "name": "name_value", + "interface_index": 1576, + } + ], + "ssh_key_names": ["ssh_key_names_value1", "ssh_key_names_value2"], + } + ], + "networks": [ + { + "name": "name_value", + "id": "id_value", + "type_": 1, + "bandwidth": 1, + "vlan_attachments": [ + {"id": "id_value", "pairing_key": "pairing_key_value"} + ], + "cidr": "cidr_value", + "service_cidr": 1, + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "vlan_same_project": True, + "jumbo_frames_enabled": True, + } + ], + "volumes": [ + { + "name": "name_value", + "id": "id_value", + "snapshots_enabled": True, + "type_": 1, + "protocol": 1, + "size_gb": 739, + "lun_ranges": [{"quantity": 895, "size_gb": 739}], + "machine_ids": ["machine_ids_value1", "machine_ids_value2"], + "nfs_exports": [ + { + "network_id": "network_id_value", + "machine_id": "machine_id_value", + "cidr": "cidr_value", + "permissions": 1, + "no_root_squash": True, + "allow_suid": True, + "allow_dev": True, + } + ], + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "performance_tier": 1, + } + ], + "ticket_id": "ticket_id_value", + "handover_service_account": "handover_service_account_value", + "email": "email_value", + "state": 1, + "location": "location_value", + "update_time": {"seconds": 751, "nanos": 543}, + "cloud_console_uri": "cloud_console_uri_value", + "vpc_sc_enabled": True, + "status_message": "status_message_value", + "custom_id": "custom_id_value", + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = nfs_share.NfsShare( + return_value = provisioning.ProvisioningConfig( name="name_value", - nfs_share_id="nfs_share_id_value", - state=nfs_share.NfsShare.State.PROVISIONED, - volume="volume_value", + ticket_id="ticket_id_value", + handover_service_account="handover_service_account_value", + email="email_value", + state=provisioning.ProvisioningConfig.State.DRAFT, + location="location_value", + cloud_console_uri="cloud_console_uri_value", + vpc_sc_enabled=True, + status_message="status_message_value", + custom_id="custom_id_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = nfs_share.NfsShare.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_nfs_share(request) + response = client.update_provisioning_config(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, nfs_share.NfsShare) + assert isinstance(response, provisioning.ProvisioningConfig) assert response.name == "name_value" - assert response.nfs_share_id == "nfs_share_id_value" - assert response.state == nfs_share.NfsShare.State.PROVISIONED - assert response.volume == "volume_value" - - -def test_get_nfs_share_rest_required_fields(request_type=nfs_share.GetNfsShareRequest): + assert response.ticket_id == "ticket_id_value" + assert response.handover_service_account == "handover_service_account_value" + assert response.email == "email_value" + assert response.state == provisioning.ProvisioningConfig.State.DRAFT + assert response.location == "location_value" + assert response.cloud_console_uri == "cloud_console_uri_value" + assert response.vpc_sc_enabled is True + assert response.status_message == "status_message_value" + assert response.custom_id == "custom_id_value" + + +def test_update_provisioning_config_rest_required_fields( + request_type=provisioning.UpdateProvisioningConfigRequest, +): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11724,21 +26249,24 @@ def test_get_nfs_share_rest_required_fields(request_type=nfs_share.GetNfsShareRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_nfs_share._get_unset_required_fields(jsonified_request) + ).update_provisioning_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_nfs_share._get_unset_required_fields(jsonified_request) + ).update_provisioning_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters 
are not mixing in. + assert not set(unset_fields) - set( + ( + "email", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11747,7 +26275,7 @@ def test_get_nfs_share_rest_required_fields(request_type=nfs_share.GetNfsShareRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = nfs_share.NfsShare() + return_value = provisioning.ProvisioningConfig() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11759,38 +26287,52 @@ def test_get_nfs_share_rest_required_fields(request_type=nfs_share.GetNfsShareRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - pb_return_value = nfs_share.NfsShare.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_nfs_share(request) + response = client.update_provisioning_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_nfs_share_rest_unset_required_fields(): +def test_update_provisioning_config_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_provisioning_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "email", + "updateMask", + ) + ) + & set( + ( + "provisioningConfig", + "updateMask", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_nfs_share_rest_interceptors(null_interceptor): +def test_update_provisioning_config_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11803,13 +26345,15 @@ def test_get_nfs_share_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_get_nfs_share" + transports.BareMetalSolutionRestInterceptor, "post_update_provisioning_config" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_get_nfs_share" + transports.BareMetalSolutionRestInterceptor, "pre_update_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = nfs_share.GetNfsShareRequest.pb(nfs_share.GetNfsShareRequest()) + pb_message = provisioning.UpdateProvisioningConfigRequest.pb( + provisioning.UpdateProvisioningConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11820,17 +26364,19 @@ def test_get_nfs_share_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = nfs_share.NfsShare.to_json(nfs_share.NfsShare()) + req.return_value._content = provisioning.ProvisioningConfig.to_json( + provisioning.ProvisioningConfig() + ) - request = nfs_share.GetNfsShareRequest() + request = 
provisioning.UpdateProvisioningConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = nfs_share.NfsShare() + post.return_value = provisioning.ProvisioningConfig() - client.get_nfs_share( + client.update_provisioning_config( request, metadata=[ ("key", "val"), @@ -11842,8 +26388,8 @@ def test_get_nfs_share_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_nfs_share_rest_bad_request( - transport: str = "rest", request_type=nfs_share.GetNfsShareRequest +def test_update_provisioning_config_rest_bad_request( + transport: str = "rest", request_type=provisioning.UpdateProvisioningConfigRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11851,7 +26397,102 @@ def test_get_nfs_share_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} + request_init = { + "provisioning_config": { + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3" + } + } + request_init["provisioning_config"] = { + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3", + "instances": [ + { + "name": "name_value", + "id": "id_value", + "instance_type": "instance_type_value", + "hyperthreading": True, + "os_image": "os_image_value", + "client_network": { + "network_id": "network_id_value", + "address": "address_value", + "existing_network_id": "existing_network_id_value", + }, + "private_network": {}, + "user_note": "user_note_value", + "account_networks_enabled": True, + "network_config": 1, + "network_template": "network_template_value", + "logical_interfaces": [ + { + "logical_network_interfaces": [ + { + "network": "network_value", + "ip_address": "ip_address_value", + "default_gateway": True, + "network_type": 1, + "id": "id_value", + } + ], + "name": "name_value", + "interface_index": 1576, + } + ], + "ssh_key_names": 
["ssh_key_names_value1", "ssh_key_names_value2"], + } + ], + "networks": [ + { + "name": "name_value", + "id": "id_value", + "type_": 1, + "bandwidth": 1, + "vlan_attachments": [ + {"id": "id_value", "pairing_key": "pairing_key_value"} + ], + "cidr": "cidr_value", + "service_cidr": 1, + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "vlan_same_project": True, + "jumbo_frames_enabled": True, + } + ], + "volumes": [ + { + "name": "name_value", + "id": "id_value", + "snapshots_enabled": True, + "type_": 1, + "protocol": 1, + "size_gb": 739, + "lun_ranges": [{"quantity": 895, "size_gb": 739}], + "machine_ids": ["machine_ids_value1", "machine_ids_value2"], + "nfs_exports": [ + { + "network_id": "network_id_value", + "machine_id": "machine_id_value", + "cidr": "cidr_value", + "permissions": 1, + "no_root_squash": True, + "allow_suid": True, + "allow_dev": True, + } + ], + "user_note": "user_note_value", + "gcp_service": "gcp_service_value", + "performance_tier": 1, + } + ], + "ticket_id": "ticket_id_value", + "handover_service_account": "handover_service_account_value", + "email": "email_value", + "state": 1, + "location": "location_value", + "update_time": {"seconds": 751, "nanos": 543}, + "cloud_console_uri": "cloud_console_uri_value", + "vpc_sc_enabled": True, + "status_message": "status_message_value", + "custom_id": "custom_id_value", + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -11863,10 +26504,10 @@ def test_get_nfs_share_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_nfs_share(request) + client.update_provisioning_config(request) -def test_get_nfs_share_rest_flattened(): +def test_update_provisioning_config_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11875,40 +26516,44 @@ def test_get_nfs_share_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = nfs_share.NfsShare() + return_value = provisioning.ProvisioningConfig() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/nfsShares/sample3" + "provisioning_config": { + "name": "projects/sample1/locations/sample2/provisioningConfigs/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = nfs_share.NfsShare.pb(return_value) + pb_return_value = provisioning.ProvisioningConfig.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_nfs_share(**mock_args) + client.update_provisioning_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{name=projects/*/locations/*/nfsShares/*}" % client.transport._host, + "%s/v2/{provisioning_config.name=projects/*/locations/*/provisioningConfigs/*}" + % client.transport._host, args[1], ) -def test_get_nfs_share_rest_flattened_error(transport: str = "rest"): +def test_update_provisioning_config_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11917,13 +26562,14 @@ def test_get_nfs_share_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_nfs_share( - nfs_share.GetNfsShareRequest(), - name="name_value", + client.update_provisioning_config( + provisioning.UpdateProvisioningConfigRequest(), + provisioning_config=provisioning.ProvisioningConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_nfs_share_rest_error(): +def test_update_provisioning_config_rest_error(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11932,51 +26578,70 @@ def test_get_nfs_share_rest_error(): @pytest.mark.parametrize( "request_type", [ - nfs_share.ListNfsSharesRequest, + network.RenameNetworkRequest, dict, ], ) -def test_list_nfs_shares_rest(request_type): +def test_rename_network_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/networks/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = nfs_share.ListNfsSharesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = network.Network( + name="name_value", + id="id_value", + type_=network.Network.Type.CLIENT, + ip_address="ip_address_value", + mac_address=["mac_address_value"], + state=network.Network.State.PROVISIONING, + vlan_id="vlan_id_value", + cidr="cidr_value", + services_cidr="services_cidr_value", + pod="pod_value", + jumbo_frames_enabled=True, + gateway_ip="gateway_ip_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = nfs_share.ListNfsSharesResponse.pb(return_value) + pb_return_value = network.Network.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_nfs_shares(request) + response = client.rename_network(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNfsSharesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, network.Network) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.type_ == network.Network.Type.CLIENT + assert response.ip_address == "ip_address_value" + assert response.mac_address == ["mac_address_value"] + assert response.state == network.Network.State.PROVISIONING + assert response.vlan_id == "vlan_id_value" + assert response.cidr == "cidr_value" + assert response.services_cidr == "services_cidr_value" + assert response.pod == "pod_value" + assert response.jumbo_frames_enabled is True + assert response.gateway_ip == "gateway_ip_value" -def test_list_nfs_shares_rest_required_fields( - request_type=nfs_share.ListNfsSharesRequest, -): +def test_rename_network_rest_required_fields(request_type=network.RenameNetworkRequest): transport_class = transports.BareMetalSolutionRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" + request_init["new_network_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11991,29 +26656,24 @@ def test_list_nfs_shares_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_nfs_shares._get_unset_required_fields(jsonified_request) + ).rename_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" + jsonified_request["newNetworkId"] = "new_network_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_nfs_shares._get_unset_required_fields(jsonified_request) - # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).rename_network._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "newNetworkId" in jsonified_request + assert jsonified_request["newNetworkId"] == "new_network_id_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12022,7 +26682,7 @@ def test_list_nfs_shares_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = nfs_share.ListNfsSharesResponse() + return_value = network.Network() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12034,47 +26694,47 @@ def test_list_nfs_shares_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - pb_return_value = nfs_share.ListNfsSharesResponse.pb(return_value) + pb_return_value = network.Network.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_nfs_shares(request) + response = client.rename_network(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_nfs_shares_rest_unset_required_fields(): +def test_rename_network_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_nfs_shares._get_unset_required_fields({}) + unset_fields = transport.rename_network._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "filter", - "pageSize", - "pageToken", + "name", + "newNetworkId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_nfs_shares_rest_interceptors(null_interceptor): +def test_rename_network_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12087,13 +26747,13 @@ def test_list_nfs_shares_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_list_nfs_shares" + transports.BareMetalSolutionRestInterceptor, "post_rename_network" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_list_nfs_shares" + transports.BareMetalSolutionRestInterceptor, "pre_rename_network" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = nfs_share.ListNfsSharesRequest.pb(nfs_share.ListNfsSharesRequest()) + pb_message = network.RenameNetworkRequest.pb(network.RenameNetworkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12104,19 +26764,17 @@ def test_list_nfs_shares_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = nfs_share.ListNfsSharesResponse.to_json( - nfs_share.ListNfsSharesResponse() - ) + req.return_value._content = network.Network.to_json(network.Network()) - request = 
nfs_share.ListNfsSharesRequest() + request = network.RenameNetworkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = nfs_share.ListNfsSharesResponse() + post.return_value = network.Network() - client.list_nfs_shares( + client.rename_network( request, metadata=[ ("key", "val"), @@ -12128,8 +26786,8 @@ def test_list_nfs_shares_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_nfs_shares_rest_bad_request( - transport: str = "rest", request_type=nfs_share.ListNfsSharesRequest +def test_rename_network_rest_bad_request( + transport: str = "rest", request_type=network.RenameNetworkRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12137,7 +26795,7 @@ def test_list_nfs_shares_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/networks/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -12149,10 +26807,10 @@ def test_list_nfs_shares_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_nfs_shares(request) + client.rename_network(request) -def test_list_nfs_shares_rest_flattened(): +def test_rename_network_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12161,38 +26819,40 @@ def test_list_nfs_shares_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = nfs_share.ListNfsSharesResponse() + return_value = network.Network() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/networks/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", + new_network_id="new_network_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = nfs_share.ListNfsSharesResponse.pb(return_value) + pb_return_value = network.Network.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_nfs_shares(**mock_args) + client.rename_network(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{parent=projects/*/locations/*}/nfsShares" % client.transport._host, + "%s/v2/{name=projects/*/locations/*/networks/*}:rename" + % client.transport._host, args[1], ) -def test_list_nfs_shares_rest_flattened_error(transport: str = "rest"): +def test_rename_network_rest_flattened_error(transport: str = "rest"): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12201,134 +26861,63 @@ def test_list_nfs_shares_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_nfs_shares( - nfs_share.ListNfsSharesRequest(), - parent="parent_value", + client.rename_network( + network.RenameNetworkRequest(), + name="name_value", + new_network_id="new_network_id_value", ) -def test_list_nfs_shares_rest_pager(transport: str = "rest"): +def test_rename_network_rest_error(): client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - nfs_share.NfsShare(), - ], - next_page_token="abc", - ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[], - next_page_token="def", - ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - ], - next_page_token="ghi", - ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(nfs_share.ListNfsSharesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_nfs_shares(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, nfs_share.NfsShare) for i in results) - - pages = 
list(client.list_nfs_shares(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - gcb_nfs_share.UpdateNfsShareRequest, + osimage.ListOSImagesRequest, dict, ], ) -def test_update_nfs_share_rest(request_type): +def test_list_os_images_rest(request_type): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "nfs_share": {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} - } - request_init["nfs_share"] = { - "name": "projects/sample1/locations/sample2/nfsShares/sample3", - "nfs_share_id": "nfs_share_id_value", - "state": 1, - "volume": "volume_value", - "allowed_clients": [ - { - "network": "network_value", - "share_ip": "share_ip_value", - "allowed_clients_cidr": "allowed_clients_cidr_value", - "mount_permissions": 1, - "allow_dev": True, - "allow_suid": True, - "no_root_squash": True, - } - ], - "labels": {}, - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = osimage.ListOSImagesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = osimage.ListOSImagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_nfs_share(request) + response = client.list_os_images(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListOSImagesPager) + assert response.next_page_token == "next_page_token_value" -def test_update_nfs_share_rest_required_fields( - request_type=gcb_nfs_share.UpdateNfsShareRequest, -): +def test_list_os_images_rest_required_fields(request_type=osimage.ListOSImagesRequest): transport_class = transports.BareMetalSolutionRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12343,19 +26932,28 @@ def test_update_nfs_share_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_nfs_share._get_unset_required_fields(jsonified_request) + ).list_os_images._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_nfs_share._get_unset_required_fields(jsonified_request) + ).list_os_images._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing 
in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12364,7 +26962,7 @@ def test_update_nfs_share_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = osimage.ListOSImagesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12376,37 +26974,46 @@ def test_update_nfs_share_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = osimage.ListOSImagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_nfs_share(request) + response = client.list_os_images(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_nfs_share_rest_unset_required_fields(): +def test_list_os_images_rest_unset_required_fields(): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials ) 
- unset_fields = transport.update_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("nfsShare",))) + unset_fields = transport.list_os_images._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_nfs_share_rest_interceptors(null_interceptor): +def test_list_os_images_rest_interceptors(null_interceptor): transport = transports.BareMetalSolutionRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12419,17 +27026,13 @@ def test_update_nfs_share_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "post_update_nfs_share" + transports.BareMetalSolutionRestInterceptor, "post_list_os_images" ) as post, mock.patch.object( - transports.BareMetalSolutionRestInterceptor, "pre_update_nfs_share" + transports.BareMetalSolutionRestInterceptor, "pre_list_os_images" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = gcb_nfs_share.UpdateNfsShareRequest.pb( - gcb_nfs_share.UpdateNfsShareRequest() - ) + pb_message = osimage.ListOSImagesRequest.pb(osimage.ListOSImagesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12440,19 +27043,19 @@ def test_update_nfs_share_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = osimage.ListOSImagesResponse.to_json( + osimage.ListOSImagesResponse() ) - request = gcb_nfs_share.UpdateNfsShareRequest() + request = osimage.ListOSImagesRequest() metadata = [ ("key", 
"val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = osimage.ListOSImagesResponse() - client.update_nfs_share( + client.list_os_images( request, metadata=[ ("key", "val"), @@ -12464,8 +27067,8 @@ def test_update_nfs_share_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_nfs_share_rest_bad_request( - transport: str = "rest", request_type=gcb_nfs_share.UpdateNfsShareRequest +def test_list_os_images_rest_bad_request( + transport: str = "rest", request_type=osimage.ListOSImagesRequest ): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12473,27 +27076,7 @@ def test_update_nfs_share_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "nfs_share": {"name": "projects/sample1/locations/sample2/nfsShares/sample3"} - } - request_init["nfs_share"] = { - "name": "projects/sample1/locations/sample2/nfsShares/sample3", - "nfs_share_id": "nfs_share_id_value", - "state": 1, - "volume": "volume_value", - "allowed_clients": [ - { - "network": "network_value", - "share_ip": "share_ip_value", - "allowed_clients_cidr": "allowed_clients_cidr_value", - "mount_permissions": 1, - "allow_dev": True, - "allow_suid": True, - "no_root_squash": True, - } - ], - "labels": {}, - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12505,10 +27088,10 @@ def test_update_nfs_share_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_nfs_share(request) + client.list_os_images(request) -def test_update_nfs_share_rest_flattened(): +def test_list_os_images_rest_flattened(): client = BareMetalSolutionClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12517,62 +27100,111 @@ def test_update_nfs_share_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = osimage.ListOSImagesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "nfs_share": { - "name": "projects/sample1/locations/sample2/nfsShares/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - nfs_share=gcb_nfs_share.NfsShare(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = osimage.ListOSImagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_nfs_share(**mock_args) + client.list_os_images(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v2/{nfs_share.name=projects/*/locations/*/nfsShares/*}" - % client.transport._host, + "%s/v2/{parent=projects/*/locations/*}/osImages" % client.transport._host, args[1], ) -def test_update_nfs_share_rest_flattened_error(transport: str = "rest"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_list_os_images_rest_flattened_error(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_images( + osimage.ListOSImagesRequest(), + parent="parent_value", + ) + + +def test_list_os_images_rest_pager(transport: str = "rest"): + client = BareMetalSolutionClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + osimage.OSImage(), + ], + next_page_token="abc", + ), + osimage.ListOSImagesResponse( + os_images=[], + next_page_token="def", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + ], + next_page_token="ghi", + ), + osimage.ListOSImagesResponse( + os_images=[ + osimage.OSImage(), + osimage.OSImage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(osimage.ListOSImagesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_nfs_share( - gcb_nfs_share.UpdateNfsShareRequest(), - nfs_share=gcb_nfs_share.NfsShare(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + pager = client.list_os_images(request=sample_request) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, osimage.OSImage) for i in results) -def test_update_nfs_share_rest_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + pages = list(client.list_os_images(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_credentials_transport_error(): @@ -12717,23 +27349,47 @@ def test_bare_metal_solution_base_transport(): "list_instances", "get_instance", "update_instance", + "rename_instance", "reset_instance", "start_instance", "stop_instance", + "enable_interactive_serial_console", + "disable_interactive_serial_console", "detach_lun", + "list_ssh_keys", + "create_ssh_key", + "delete_ssh_key", "list_volumes", "get_volume", "update_volume", + "rename_volume", + "evict_volume", "resize_volume", "list_networks", "list_network_usage", "get_network", "update_network", + "create_volume_snapshot", + "restore_volume_snapshot", + "delete_volume_snapshot", + "get_volume_snapshot", + "list_volume_snapshots", "get_lun", "list_luns", + "evict_lun", "get_nfs_share", "list_nfs_shares", "update_nfs_share", + "create_nfs_share", + "rename_nfs_share", + "delete_nfs_share", + "list_provisioning_quotas", + "submit_provisioning_config", + "get_provisioning_config", + "create_provisioning_config", + "update_provisioning_config", + "rename_network", + "list_os_images", "get_location", "list_locations", ) @@ -13027,6 +27683,9 @@ def test_bare_metal_solution_client_transport_session_collision(transport_name): session1 = client1.transport.update_instance._session session2 = 
client2.transport.update_instance._session assert session1 != session2 + session1 = client1.transport.rename_instance._session + session2 = client2.transport.rename_instance._session + assert session1 != session2 session1 = client1.transport.reset_instance._session session2 = client2.transport.reset_instance._session assert session1 != session2 @@ -13036,9 +27695,24 @@ def test_bare_metal_solution_client_transport_session_collision(transport_name): session1 = client1.transport.stop_instance._session session2 = client2.transport.stop_instance._session assert session1 != session2 + session1 = client1.transport.enable_interactive_serial_console._session + session2 = client2.transport.enable_interactive_serial_console._session + assert session1 != session2 + session1 = client1.transport.disable_interactive_serial_console._session + session2 = client2.transport.disable_interactive_serial_console._session + assert session1 != session2 session1 = client1.transport.detach_lun._session session2 = client2.transport.detach_lun._session assert session1 != session2 + session1 = client1.transport.list_ssh_keys._session + session2 = client2.transport.list_ssh_keys._session + assert session1 != session2 + session1 = client1.transport.create_ssh_key._session + session2 = client2.transport.create_ssh_key._session + assert session1 != session2 + session1 = client1.transport.delete_ssh_key._session + session2 = client2.transport.delete_ssh_key._session + assert session1 != session2 session1 = client1.transport.list_volumes._session session2 = client2.transport.list_volumes._session assert session1 != session2 @@ -13048,6 +27722,12 @@ def test_bare_metal_solution_client_transport_session_collision(transport_name): session1 = client1.transport.update_volume._session session2 = client2.transport.update_volume._session assert session1 != session2 + session1 = client1.transport.rename_volume._session + session2 = client2.transport.rename_volume._session + assert session1 != session2 + 
session1 = client1.transport.evict_volume._session + session2 = client2.transport.evict_volume._session + assert session1 != session2 session1 = client1.transport.resize_volume._session session2 = client2.transport.resize_volume._session assert session1 != session2 @@ -13063,12 +27743,30 @@ def test_bare_metal_solution_client_transport_session_collision(transport_name): session1 = client1.transport.update_network._session session2 = client2.transport.update_network._session assert session1 != session2 + session1 = client1.transport.create_volume_snapshot._session + session2 = client2.transport.create_volume_snapshot._session + assert session1 != session2 + session1 = client1.transport.restore_volume_snapshot._session + session2 = client2.transport.restore_volume_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete_volume_snapshot._session + session2 = client2.transport.delete_volume_snapshot._session + assert session1 != session2 + session1 = client1.transport.get_volume_snapshot._session + session2 = client2.transport.get_volume_snapshot._session + assert session1 != session2 + session1 = client1.transport.list_volume_snapshots._session + session2 = client2.transport.list_volume_snapshots._session + assert session1 != session2 session1 = client1.transport.get_lun._session session2 = client2.transport.get_lun._session assert session1 != session2 session1 = client1.transport.list_luns._session session2 = client2.transport.list_luns._session assert session1 != session2 + session1 = client1.transport.evict_lun._session + session2 = client2.transport.evict_lun._session + assert session1 != session2 session1 = client1.transport.get_nfs_share._session session2 = client2.transport.get_nfs_share._session assert session1 != session2 @@ -13078,6 +27776,36 @@ def test_bare_metal_solution_client_transport_session_collision(transport_name): session1 = client1.transport.update_nfs_share._session session2 = 
client2.transport.update_nfs_share._session assert session1 != session2 + session1 = client1.transport.create_nfs_share._session + session2 = client2.transport.create_nfs_share._session + assert session1 != session2 + session1 = client1.transport.rename_nfs_share._session + session2 = client2.transport.rename_nfs_share._session + assert session1 != session2 + session1 = client1.transport.delete_nfs_share._session + session2 = client2.transport.delete_nfs_share._session + assert session1 != session2 + session1 = client1.transport.list_provisioning_quotas._session + session2 = client2.transport.list_provisioning_quotas._session + assert session1 != session2 + session1 = client1.transport.submit_provisioning_config._session + session2 = client2.transport.submit_provisioning_config._session + assert session1 != session2 + session1 = client1.transport.get_provisioning_config._session + session2 = client2.transport.get_provisioning_config._session + assert session1 != session2 + session1 = client1.transport.create_provisioning_config._session + session2 = client2.transport.create_provisioning_config._session + assert session1 != session2 + session1 = client1.transport.update_provisioning_config._session + session2 = client2.transport.update_provisioning_config._session + assert session1 != session2 + session1 = client1.transport.rename_network._session + session2 = client2.transport.rename_network._session + assert session1 != session2 + session1 = client1.transport.list_os_images._session + session2 = client2.transport.list_os_images._session + assert session1 != session2 def test_bare_metal_solution_grpc_transport_channel(): @@ -13266,11 +27994,95 @@ def test_parse_instance_path(): assert expected == actual -def test_lun_path(): +def test_instance_config_path(): project = "cuttlefish" location = "mussel" - volume = "winkle" - lun = "nautilus" + instance_config = "winkle" + expected = "projects/{project}/locations/{location}/instanceConfigs/{instance_config}".format( + 
project=project, + location=location, + instance_config=instance_config, + ) + actual = BareMetalSolutionClient.instance_config_path( + project, location, instance_config + ) + assert expected == actual + + +def test_parse_instance_config_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "instance_config": "abalone", + } + path = BareMetalSolutionClient.instance_config_path(**expected) + + # Check that the path construction is reversible. + actual = BareMetalSolutionClient.parse_instance_config_path(path) + assert expected == actual + + +def test_instance_quota_path(): + project = "squid" + location = "clam" + instance_quota = "whelk" + expected = "projects/{project}/locations/{location}/instanceQuotas/{instance_quota}".format( + project=project, + location=location, + instance_quota=instance_quota, + ) + actual = BareMetalSolutionClient.instance_quota_path( + project, location, instance_quota + ) + assert expected == actual + + +def test_parse_instance_quota_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance_quota": "nudibranch", + } + path = BareMetalSolutionClient.instance_quota_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BareMetalSolutionClient.parse_instance_quota_path(path) + assert expected == actual + + +def test_interconnect_attachment_path(): + project = "cuttlefish" + region = "mussel" + interconnect_attachment = "winkle" + expected = "projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format( + project=project, + region=region, + interconnect_attachment=interconnect_attachment, + ) + actual = BareMetalSolutionClient.interconnect_attachment_path( + project, region, interconnect_attachment + ) + assert expected == actual + + +def test_parse_interconnect_attachment_path(): + expected = { + "project": "nautilus", + "region": "scallop", + "interconnect_attachment": "abalone", + } + path = BareMetalSolutionClient.interconnect_attachment_path(**expected) + + # Check that the path construction is reversible. + actual = BareMetalSolutionClient.parse_interconnect_attachment_path(path) + assert expected == actual + + +def test_lun_path(): + project = "squid" + location = "clam" + volume = "whelk" + lun = "octopus" expected = ( "projects/{project}/locations/{location}/volumes/{volume}/luns/{lun}".format( project=project, @@ -13285,10 +28097,10 @@ def test_lun_path(): def test_parse_lun_path(): expected = { - "project": "scallop", - "location": "abalone", - "volume": "squid", - "lun": "clam", + "project": "oyster", + "location": "nudibranch", + "volume": "cuttlefish", + "lun": "mussel", } path = BareMetalSolutionClient.lun_path(**expected) @@ -13298,9 +28110,9 @@ def test_parse_lun_path(): def test_network_path(): - project = "whelk" - location = "octopus" - network = "oyster" + project = "winkle" + location = "nautilus" + network = "scallop" expected = "projects/{project}/locations/{location}/networks/{network}".format( project=project, location=location, @@ -13312,9 +28124,9 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "network": "mussel", + "project": 
"abalone", + "location": "squid", + "network": "clam", } path = BareMetalSolutionClient.network_path(**expected) @@ -13323,6 +28135,34 @@ def test_parse_network_path(): assert expected == actual +def test_network_config_path(): + project = "whelk" + location = "octopus" + network_config = "oyster" + expected = "projects/{project}/locations/{location}/networkConfigs/{network_config}".format( + project=project, + location=location, + network_config=network_config, + ) + actual = BareMetalSolutionClient.network_config_path( + project, location, network_config + ) + assert expected == actual + + +def test_parse_network_config_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "network_config": "mussel", + } + path = BareMetalSolutionClient.network_config_path(**expected) + + # Check that the path construction is reversible. + actual = BareMetalSolutionClient.parse_network_config_path(path) + assert expected == actual + + def test_nfs_share_path(): project = "winkle" location = "nautilus" @@ -13349,10 +28189,92 @@ def test_parse_nfs_share_path(): assert expected == actual -def test_server_network_template_path(): +def test_os_image_path(): + project = "whelk" + location = "octopus" + os_image = "oyster" + expected = "projects/{project}/locations/{location}/osImages/{os_image}".format( + project=project, + location=location, + os_image=os_image, + ) + actual = BareMetalSolutionClient.os_image_path(project, location, os_image) + assert expected == actual + + +def test_parse_os_image_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "os_image": "mussel", + } + path = BareMetalSolutionClient.os_image_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BareMetalSolutionClient.parse_os_image_path(path) + assert expected == actual + + +def test_provisioning_config_path(): + project = "winkle" + location = "nautilus" + provisioning_config = "scallop" + expected = "projects/{project}/locations/{location}/provisioningConfigs/{provisioning_config}".format( + project=project, + location=location, + provisioning_config=provisioning_config, + ) + actual = BareMetalSolutionClient.provisioning_config_path( + project, location, provisioning_config + ) + assert expected == actual + + +def test_parse_provisioning_config_path(): + expected = { + "project": "abalone", + "location": "squid", + "provisioning_config": "clam", + } + path = BareMetalSolutionClient.provisioning_config_path(**expected) + + # Check that the path construction is reversible. + actual = BareMetalSolutionClient.parse_provisioning_config_path(path) + assert expected == actual + + +def test_provisioning_quota_path(): project = "whelk" location = "octopus" - server_network_template = "oyster" + provisioning_quota = "oyster" + expected = "projects/{project}/locations/{location}/provisioningQuotas/{provisioning_quota}".format( + project=project, + location=location, + provisioning_quota=provisioning_quota, + ) + actual = BareMetalSolutionClient.provisioning_quota_path( + project, location, provisioning_quota + ) + assert expected == actual + + +def test_parse_provisioning_quota_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "provisioning_quota": "mussel", + } + path = BareMetalSolutionClient.provisioning_quota_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BareMetalSolutionClient.parse_provisioning_quota_path(path) + assert expected == actual + + +def test_server_network_template_path(): + project = "winkle" + location = "nautilus" + server_network_template = "scallop" expected = "projects/{project}/locations/{location}/serverNetworkTemplate/{server_network_template}".format( project=project, location=location, @@ -13366,9 +28288,9 @@ def test_server_network_template_path(): def test_parse_server_network_template_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "server_network_template": "mussel", + "project": "abalone", + "location": "squid", + "server_network_template": "clam", } path = BareMetalSolutionClient.server_network_template_path(**expected) @@ -13377,6 +28299,32 @@ def test_parse_server_network_template_path(): assert expected == actual +def test_ssh_key_path(): + project = "whelk" + location = "octopus" + ssh_key = "oyster" + expected = "projects/{project}/locations/{location}/sshKeys/{ssh_key}".format( + project=project, + location=location, + ssh_key=ssh_key, + ) + actual = BareMetalSolutionClient.ssh_key_path(project, location, ssh_key) + assert expected == actual + + +def test_parse_ssh_key_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "ssh_key": "mussel", + } + path = BareMetalSolutionClient.ssh_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BareMetalSolutionClient.parse_ssh_key_path(path) + assert expected == actual + + def test_volume_path(): project = "winkle" location = "nautilus" @@ -13403,8 +28351,69 @@ def test_parse_volume_path(): assert expected == actual +def test_volume_config_path(): + project = "whelk" + location = "octopus" + volume_config = "oyster" + expected = ( + "projects/{project}/locations/{location}/volumeConfigs/{volume_config}".format( + project=project, + location=location, + volume_config=volume_config, + ) + ) + actual = BareMetalSolutionClient.volume_config_path( + project, location, volume_config + ) + assert expected == actual + + +def test_parse_volume_config_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "volume_config": "mussel", + } + path = BareMetalSolutionClient.volume_config_path(**expected) + + # Check that the path construction is reversible. + actual = BareMetalSolutionClient.parse_volume_config_path(path) + assert expected == actual + + +def test_volume_snapshot_path(): + project = "winkle" + location = "nautilus" + volume = "scallop" + snapshot = "abalone" + expected = "projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot}".format( + project=project, + location=location, + volume=volume, + snapshot=snapshot, + ) + actual = BareMetalSolutionClient.volume_snapshot_path( + project, location, volume, snapshot + ) + assert expected == actual + + +def test_parse_volume_snapshot_path(): + expected = { + "project": "squid", + "location": "clam", + "volume": "whelk", + "snapshot": "octopus", + } + path = BareMetalSolutionClient.volume_snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BareMetalSolutionClient.parse_volume_snapshot_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -13414,7 +28423,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nudibranch", } path = BareMetalSolutionClient.common_billing_account_path(**expected) @@ -13424,7 +28433,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -13434,7 +28443,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "mussel", } path = BareMetalSolutionClient.common_folder_path(**expected) @@ -13444,7 +28453,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -13454,7 +28463,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nautilus", } path = BareMetalSolutionClient.common_organization_path(**expected) @@ -13464,7 +28473,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -13474,7 +28483,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "abalone", } path = BareMetalSolutionClient.common_project_path(**expected) @@ -13484,8 +28493,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = 
"abalone" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -13496,8 +28505,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "whelk", + "location": "octopus", } path = BareMetalSolutionClient.common_location_path(**expected) diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index 4da6dff17c43..9c6974b418ac 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,35 @@ # Changelog +## [0.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.16.0...google-cloud-batch-v0.17.0) (2023-08-31) + + +### Features + +* add Batch Managed Container support for v1alpha ([37e457c](https://github.com/googleapis/google-cloud-python/commit/37e457c74eccc838771cba93d216afc4be97030f)) +* Add more compute resource API descriptions to match with VM's machine type field ([37e457c](https://github.com/googleapis/google-cloud-python/commit/37e457c74eccc838771cba93d216afc4be97030f)) +* add stderr_snippet to indicate the real stderr output by runnables to the execution field of status event ([1a8670d](https://github.com/googleapis/google-cloud-python/commit/1a8670df87e7a840cee211bbf17794dc0114d840)) +* Clarify Batch API proto doc about pubsub notifications ([37e457c](https://github.com/googleapis/google-cloud-python/commit/37e457c74eccc838771cba93d216afc4be97030f)) + + +### Documentation + +* Clarify Batch API proto doc about pubsub notifications ([1a8670d](https://github.com/googleapis/google-cloud-python/commit/1a8670df87e7a840cee211bbf17794dc0114d840)) +* Expand compute resource API docs to match with VM's machine type field ([1a8670d](https://github.com/googleapis/google-cloud-python/commit/1a8670df87e7a840cee211bbf17794dc0114d840)) +* Update description on size_gb in disk 
field ([#11615](https://github.com/googleapis/google-cloud-python/issues/11615)) ([d46f714](https://github.com/googleapis/google-cloud-python/commit/d46f7142e4e50f4a3dedb01e9fa574ebb29ce50e)) + +## [0.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.15.0...google-cloud-batch-v0.16.0) (2023-08-09) + + +### Features + +* Add Batch Managed Container support for v1alpha ([0e7f0b0](https://github.com/googleapis/google-cloud-python/commit/0e7f0b07e4b6149c8e573cab6f82667f1fe50cf6)) +* Clarify Batch API proto doc about pubsub notifications ([0e7f0b0](https://github.com/googleapis/google-cloud-python/commit/0e7f0b07e4b6149c8e573cab6f82667f1fe50cf6)) + + +### Documentation + +* Clarify Batch API proto doc about pubsub notifications ([#11550](https://github.com/googleapis/google-cloud-python/issues/11550)) ([4a8107a](https://github.com/googleapis/google-cloud-python/commit/4a8107a7dd492249807702cdc406c9d9c294c663)) + ## [0.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.14.0...google-cloud-batch-v0.15.0) (2023-08-03) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 91c9239232cc..b8c7a12e8c02 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.15.0" # {x-release-please-version} +__version__ = "0.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 91c9239232cc..b8c7a12e8c02 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.15.0" # {x-release-please-version} +__version__ = "0.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 0837fafc53dd..26b8c8f4065f 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -313,7 +313,7 @@ class JobNotification(proto.Message): Attributes: pubsub_topic (str): The Pub/Sub topic where notifications like the job state - changes will be published. This topic exist in the same + changes will be published. The topic must exist in the same project as the job and billings will be charged to this project. If not specified, no Pub/Sub messages will be sent. Topic format: ``projects/{project}/topics/{topic}``. @@ -339,10 +339,12 @@ class Type(proto.Enum): TASK_STATE_CHANGED = 2 class Message(proto.Message): - r"""Message details. - Describe the attribute that a message should have. - Without specified message attributes, no message will be sent by - default. + r"""Message details. Describe the conditions under which messages will + be sent. If no attribute is defined, no message will be sent by + default. One message should specify either the job or the task level + attributes, but not both. 
For example, job level: JOB_STATE_CHANGED + and/or a specified new_job_state; task level: TASK_STATE_CHANGED + and/or a specified new_task_state. Attributes: type_ (google.cloud.batch_v1.types.JobNotification.Type): @@ -473,24 +475,13 @@ class Disk(proto.Message): Attributes: image (str): - Name of an image used as the data source. For example, the - following are all valid URLs: + URL for a VM image to use as the data source for this disk. + For example, the following are all valid URLs: - Specify the image by its family name: - - .. raw:: html - -
projects/project/global/images/family/image_family
- + projects/{project}/global/images/family/{image_family} - Specify the image version: - - .. raw:: html - -
projects/project/global/images/image_version
+ projects/{project}/global/images/{image_version} You can also use Batch customized image in short names. The following image values are supported for a boot disk: @@ -514,15 +505,22 @@ class Disk(proto.Message): size_gb (int): Disk size in GB. - For persistent disk, this field is ignored if - ``data_source`` is ``image`` or ``snapshot``. For local SSD, - size_gb should be a multiple of 375GB, otherwise, the final - size will be the next greater multiple of 375 GB. For boot - disk, Batch will calculate the boot disk size based on - source image and task requirements if you do not speicify - the size. If both this field and the boot_disk_mib field in - task spec's compute_resource are defined, Batch will only - honor this field. + **Non-Boot Disk**: If the ``type`` specifies a persistent + disk, this field is ignored if ``data_source`` is set as + ``image`` or ``snapshot``. If the ``type`` specifies a local + SSD, this field should be a multiple of 375 GB, otherwise, + the final size will be the next greater multiple of 375 GB. + + **Boot Disk**: Batch will calculate the boot disk size based + on source image and task requirements if you do not speicify + the size. If both this field and the ``boot_disk_mib`` field + in task spec's ``compute_resource`` are defined, Batch will + only honor this field. Also, this field should be no smaller + than the source disk's size when the ``data_source`` is set + as ``snapshot`` or ``image``. For example, if you set an + image as the ``data_source`` field and the image's default + disk size 30 GB, you can only use this field to make the + disk larger or equal to 30 GB. disk_interface (str): Local SSDs are available through both "SCSI" and "NVMe" interfaces. If not indicated, "NVMe" @@ -661,7 +659,10 @@ class InstancePolicy(proto.Message): disks (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.AttachedDisk]): Non-boot disks to be attached for each VM created by this InstancePolicy. 
New disks will - be deleted when the VM is deleted. + be deleted when the VM is deleted. A non-boot + disk is a disk that can be of a device with a + file system or a raw storage drive that is not + ready for data storage and accessing. """ machine_type: str = proto.Field( @@ -696,7 +697,11 @@ class InstancePolicy(proto.Message): ) class InstancePolicyOrTemplate(proto.Message): - r"""Either an InstancePolicy or an instance template. + r"""InstancePolicyOrTemplate lets you define the type of + resources to use for this job either with an InstancePolicy or + an instance template. If undefined, Batch picks the type of VM + to use and doesn't include optional VM resources such as GPUs + and extra disks. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -750,36 +755,23 @@ class NetworkInterface(proto.Message): Attributes: network (str): - The URL of an existing network resource. - You can specify the network as a full or partial - URL. + The URL of an existing network resource. You can specify the + network as a full or partial URL. + For example, the following are all valid URLs: -
https://www.googleapis.com/compute/v1/projects/project/global/networks/network
-
projects/project/global/networks/network
-
global/networks/network
+ - https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network} + - projects/{project}/global/networks/{network} + - global/networks/{network} subnetwork (str): - The URL of an existing subnetwork resource in - the network. You can specify the subnetwork as a - full or partial URL. + The URL of an existing subnetwork resource in the network. + You can specify the subnetwork as a full or partial URL. + For example, the following are all valid URLs: -
https://www.googleapis.com/compute/v1/projects/project/regions/region/subnetworks/subnetwork
-
projects/project/regions/region/subnetworks/subnetwork
-
regions/region/subnetworks/subnetwork
+ - https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork} + - projects/{project}/regions/{region}/subnetworks/{subnetwork} + - regions/{region}/subnetworks/{subnetwork} no_external_ip_address (bool): Default is false (with an external IP address). Required if no external public IP diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index 953e3bbf7620..5a0c6a06d9d8 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -40,13 +40,56 @@ class ComputeResource(proto.Message): - r"""Compute resource requirements + r"""Compute resource requirements. + + ComputeResource defines the amount of resources required for each + task. Make sure your tasks have enough resources to successfully + run. If you also define the types of resources for a job to use with + the + `InstancePolicyOrTemplate `__ + field, make sure both fields are compatible with each other. Attributes: cpu_milli (int): The milliCPU count. + + ``cpuMilli`` defines the amount of CPU resources per task in + milliCPU units. For example, ``1000`` corresponds to 1 vCPU + per task. If undefined, the default value is ``2000``. + + If you also define the VM's machine type using the + ``machineType`` in + `InstancePolicy `__ + field or inside the ``instanceTemplate`` in the + `InstancePolicyOrTemplate `__ + field, make sure the CPU resources for both fields are + compatible with each other and with how many tasks you want + to allow to run on the same VM at the same time. + + For example, if you specify the ``n2-standard-2`` machine + type, which has 2 vCPUs each, you are recommended to set + ``cpuMilli`` no more than ``2000``, or you are recommended + to run two tasks on the same VM if you set ``cpuMilli`` to + ``1000`` or less. memory_mib (int): Memory in MiB. 
+ + ``memoryMib`` defines the amount of memory per task in MiB + units. If undefined, the default value is ``2000``. If you + also define the VM's machine type using the ``machineType`` + in + `InstancePolicy `__ + field or inside the ``instanceTemplate`` in the + `InstancePolicyOrTemplate `__ + field, make sure the memory resources for both fields are + compatible with each other and with how many tasks you want + to allow to run on the same VM at the same time. + + For example, if you specify the ``n2-standard-2`` machine + type, which has 8 GiB each, you are recommended to set + ``memoryMib`` to no more than ``8192``, or you are + recommended to run two tasks on the same VM if you set + ``memoryMib`` to ``4096`` or less. boot_disk_mib (int): Extra boot disk size in MiB for each task. """ diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 91c9239232cc..b8c7a12e8c02 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.15.0" # {x-release-please-version} +__version__ = "0.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 93e04a8da729..8b6340192fbd 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -416,7 +416,7 @@ class JobNotification(proto.Message): Attributes: pubsub_topic (str): The Pub/Sub topic where notifications like the job state - changes will be published. This topic exist in the same + changes will be published. 
The topic must exist in the same project as the job and billings will be charged to this project. If not specified, no Pub/Sub messages will be sent. Topic format: ``projects/{project}/topics/{topic}``. @@ -442,10 +442,12 @@ class Type(proto.Enum): TASK_STATE_CHANGED = 2 class Message(proto.Message): - r"""Message details. - Describe the attribute that a message should have. - Without specified message attributes, no message will be sent by - default. + r"""Message details. Describe the conditions under which messages will + be sent. If no attribute is defined, no message will be sent by + default. One message should specify either the job or the task level + attributes, but not both. For example, job level: JOB_STATE_CHANGED + and/or a specified new_job_state; task level: TASK_STATE_CHANGED + and/or a specified new_task_state. Attributes: type_ (google.cloud.batch_v1alpha.types.JobNotification.Type): @@ -592,24 +594,13 @@ class Disk(proto.Message): Attributes: image (str): - Name of an image used as the data source. For example, the - following are all valid URLs: + URL for a VM image to use as the data source for this disk. + For example, the following are all valid URLs: - Specify the image by its family name: - - .. raw:: html - -
projects/project/global/images/family/image_family
- + projects/{project}/global/images/family/{image_family} - Specify the image version: - - .. raw:: html - -
projects/project/global/images/image_version
+ projects/{project}/global/images/{image_version} You can also use Batch customized image in short names. The following image values are supported for a boot disk: @@ -633,15 +624,22 @@ class Disk(proto.Message): size_gb (int): Disk size in GB. - For persistent disk, this field is ignored if - ``data_source`` is ``image`` or ``snapshot``. For local SSD, - size_gb should be a multiple of 375GB, otherwise, the final - size will be the next greater multiple of 375 GB. For boot - disk, Batch will calculate the boot disk size based on - source image and task requirements if you do not speicify - the size. If both this field and the boot_disk_mib field in - task spec's compute_resource are defined, Batch will only - honor this field. + **Non-Boot Disk**: If the ``type`` specifies a persistent + disk, this field is ignored if ``data_source`` is set as + ``image`` or ``snapshot``. If the ``type`` specifies a local + SSD, this field should be a multiple of 375 GB, otherwise, + the final size will be the next greater multiple of 375 GB. + + **Boot Disk**: Batch will calculate the boot disk size based + on source image and task requirements if you do not speicify + the size. If both this field and the ``boot_disk_mib`` field + in task spec's ``compute_resource`` are defined, Batch will + only honor this field. Also, this field should be no smaller + than the source disk's size when the ``data_source`` is set + as ``snapshot`` or ``image``. For example, if you set an + image as the ``data_source`` field and the image's default + disk size 30 GB, you can only use this field to make the + disk larger or equal to 30 GB. disk_interface (str): Local SSDs are available through both "SCSI" and "NVMe" interfaces. If not indicated, "NVMe" @@ -782,7 +780,10 @@ class InstancePolicy(proto.Message): disks (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.AttachedDisk]): Non-boot disks to be attached for each VM created by this InstancePolicy. 
New disks will - be deleted when the VM is deleted. + be deleted when the VM is deleted. A non-boot + disk is a disk that can be of a device with a + file system or a raw storage drive that is not + ready for data storage and accessing. reservation (str): If specified, VMs will consume only the specified reservation. If not specified @@ -830,7 +831,11 @@ class InstancePolicy(proto.Message): ) class InstancePolicyOrTemplate(proto.Message): - r"""Either an InstancePolicy or an instance template. + r"""InstancePolicyOrTemplate lets you define the type of + resources to use for this job either with an InstancePolicy or + an instance template. If undefined, Batch picks the type of VM + to use and doesn't include optional VM resources such as GPUs + and extra disks. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -884,36 +889,23 @@ class NetworkInterface(proto.Message): Attributes: network (str): - The URL of an existing network resource. - You can specify the network as a full or partial - URL. + The URL of an existing network resource. You can specify the + network as a full or partial URL. + For example, the following are all valid URLs: -
https://www.googleapis.com/compute/v1/projects/project/global/networks/network
-
projects/project/global/networks/network
-
global/networks/network
+ - https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network} + - projects/{project}/global/networks/{network} + - global/networks/{network} subnetwork (str): - The URL of an existing subnetwork resource in - the network. You can specify the subnetwork as a - full or partial URL. + The URL of an existing subnetwork resource in the network. + You can specify the subnetwork as a full or partial URL. + For example, the following are all valid URLs: -
https://www.googleapis.com/compute/v1/projects/project/regions/region/subnetworks/subnetwork
-
projects/project/regions/region/subnetworks/subnetwork
-
regions/region/subnetworks/subnetwork
+ - https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork} + - projects/{project}/regions/{region}/subnetworks/{subnetwork} + - regions/{region}/subnetworks/{subnetwork} no_external_ip_address (bool): Default is false (with an external IP address). Required if no external public IP diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index 8ae1aaf3de9c..4a27636f8f02 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -41,13 +41,56 @@ class ComputeResource(proto.Message): - r"""Compute resource requirements + r"""Compute resource requirements. + + ComputeResource defines the amount of resources required for each + task. Make sure your tasks have enough resources to successfully + run. If you also define the types of resources for a job to use with + the + `InstancePolicyOrTemplate `__ + field, make sure both fields are compatible with each other. Attributes: cpu_milli (int): The milliCPU count. + + ``cpuMilli`` defines the amount of CPU resources per task in + milliCPU units. For example, ``1000`` corresponds to 1 vCPU + per task. If undefined, the default value is ``2000``. + + If you also define the VM's machine type using the + ``machineType`` in + `InstancePolicy `__ + field or inside the ``instanceTemplate`` in the + `InstancePolicyOrTemplate `__ + field, make sure the CPU resources for both fields are + compatible with each other and with how many tasks you want + to allow to run on the same VM at the same time. + + For example, if you specify the ``n2-standard-2`` machine + type, which has 2 vCPUs each, you are recommended to set + ``cpuMilli`` no more than ``2000``, or you are recommended + to run two tasks on the same VM if you set ``cpuMilli`` to + ``1000`` or less. memory_mib (int): Memory in MiB. 
+ + ``memoryMib`` defines the amount of memory per task in MiB + units. If undefined, the default value is ``2000``. If you + also define the VM's machine type using the ``machineType`` + in + `InstancePolicy `__ + field or inside the ``instanceTemplate`` in the + `InstancePolicyOrTemplate `__ + field, make sure the memory resources for both fields are + compatible with each other and with how many tasks you want + to allow to run on the same VM at the same time. + + For example, if you specify the ``n2-standard-2`` machine + type, which has 8 GiB each, you are recommended to set + ``memoryMib`` to no more than ``8192``, or you are + recommended to run two tasks on the same VM if you set + ``memoryMib`` to ``4096`` or less. gpu_count (int): The GPU count. Not yet implemented. @@ -123,12 +166,21 @@ class TaskExecution(proto.Message): When task is completed as the status of FAILED or SUCCEEDED, exit code is for one task execution result, default is 0 as success. + stderr_snippet (str): + Optional. The tail end of any content written + to standard error by the task execution. This + field will be populated only when the execution + failed. """ exit_code: int = proto.Field( proto.INT32, number=1, ) + stderr_snippet: str = proto.Field( + proto.STRING, + number=2, + ) class TaskStatus(proto.Message): @@ -228,6 +280,13 @@ class Runnable(proto.Message): Barrier runnable. This field is a member of `oneof`_ ``executable``. + display_name (str): + Optional. DisplayName is an optional field + that can be provided by the caller. If provided, + it will be used in logs and other outputs to + identify the script, making it easier for users + to understand the logs. If not provided the + index of the runnable will be used for outputs. ignore_exit_status (bool): Normally, a non-zero exit status causes the Task to fail. This flag allows execution of @@ -296,6 +355,13 @@ class Container(proto.Message): Optional password for logging in to a docker registry. 
If password matches ``projects/*/secrets/*/versions/*`` then Batch will read the password from the Secret Manager; + enable_image_streaming (bool): + Optional. Not yet implemented. + If set to true, container will run with Image + streaming. The container runtime will be changed + to containerd instead of docker. Currently, only + imageUri, commands, entrypoint and volumes are + supported and any other fields will be ignored. """ image_uri: str = proto.Field( @@ -330,6 +396,10 @@ class Container(proto.Message): proto.STRING, number=11, ) + enable_image_streaming: bool = proto.Field( + proto.BOOL, + number=12, + ) class Script(proto.Message): r"""Script runnable. @@ -414,6 +484,10 @@ class Barrier(proto.Message): oneof="executable", message=Barrier, ) + display_name: str = proto.Field( + proto.STRING, + number=10, + ) ignore_exit_status: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index d804b2574309..001411ace456 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.15.0" + "version": "0.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 1f5bda184d3d..31ec4748cc0d 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": 
"PYTHON", "name": "google-cloud-batch", - "version": "0.15.0" + "version": "0.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 0d34d681a91d..6044651e7c4b 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -2550,9 +2550,11 @@ def test_create_job_rest(request_type): "block_external_network": True, "username": "username_value", "password": "password_value", + "enable_image_streaming": True, }, "script": {"path": "path_value", "text": "text_value"}, "barrier": {"name": "name_value"}, + "display_name": "display_name_value", "ignore_exit_status": True, "background": True, "always_run": True, @@ -2698,7 +2700,10 @@ def test_create_job_rest(request_type): "type_": "type__value", "description": "description_value", "event_time": {"seconds": 751, "nanos": 543}, - "task_execution": {"exit_code": 948}, + "task_execution": { + "exit_code": 948, + "stderr_snippet": "stderr_snippet_value", + }, "task_state": 1, } ], @@ -2934,9 +2939,11 @@ def test_create_job_rest_bad_request( "block_external_network": True, "username": "username_value", "password": "password_value", + "enable_image_streaming": True, }, "script": {"path": "path_value", "text": "text_value"}, "barrier": {"name": "name_value"}, + "display_name": "display_name_value", "ignore_exit_status": True, "background": True, "always_run": True, @@ -3082,7 +3089,10 @@ def test_create_job_rest_bad_request( "type_": "type__value", "description": "description_value", "event_time": {"seconds": 751, "nanos": 543}, - "task_execution": {"exit_code": 948}, + "task_execution": { + "exit_code": 948, + "stderr_snippet": "stderr_snippet_value", + }, "task_state": 1, } ], diff --git a/packages/google-cloud-bigquery-biglake/.flake8 
b/packages/google-cloud-bigquery-biglake/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-bigquery-biglake/.flake8 +++ b/packages/google-cloud-bigquery-biglake/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bigquery-biglake/CHANGELOG.md b/packages/google-cloud-bigquery-biglake/CHANGELOG.md index cd4b12448997..1ed703763851 100644 --- a/packages/google-cloud-bigquery-biglake/CHANGELOG.md +++ b/packages/google-cloud-bigquery-biglake/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.4.1...google-cloud-bigquery-biglake-v0.4.2) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [0.4.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.4.0...google-cloud-bigquery-biglake-v0.4.1) (2023-07-05) diff --git a/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst b/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst index f5a152d9ae35..799bcea44310 100644 --- a/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst +++ b/packages/google-cloud-bigquery-biglake/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bigquery-biglake/noxfile.py ********** diff --git a/packages/google-cloud-bigquery-biglake/MANIFEST.in b/packages/google-cloud-bigquery-biglake/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-bigquery-biglake/MANIFEST.in +++ b/packages/google-cloud-bigquery-biglake/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bigquery-biglake/README.rst b/packages/google-cloud-bigquery-biglake/README.rst index 4793bcabf1ea..5d004ffb361c 100644 --- a/packages/google-cloud-bigquery-biglake/README.rst +++ b/packages/google-cloud-bigquery-biglake/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. 
_`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-bigquery-biglake + pip install google-cloud-bigquery-biglake Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-bigquery-biglake + py -m venv + .\\Scripts\activate + pip install google-cloud-bigquery-biglake Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-biglake/docs/conf.py b/packages/google-cloud-bigquery-biglake/docs/conf.py index 85e36171ff0d..9ff25bc1a59c 100644 --- a/packages/google-cloud-bigquery-biglake/docs/conf.py +++ b/packages/google-cloud-bigquery-biglake/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py index 360a0d13ebdd..a0cde8ce7bb7 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py index 360a0d13ebdd..a0cde8ce7bb7 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py index 360a0d13ebdd..a0cde8ce7bb7 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/noxfile.py b/packages/google-cloud-bigquery-biglake/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-bigquery-biglake/noxfile.py +++ b/packages/google-cloud-bigquery-biglake/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json index bc293aab30cd..c337c197538a 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.1.0" + "version": "0.4.2" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json index f43e6ba05775..b5915175711b 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.1.0" + "version": "0.4.2" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-bigquery-biglake/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md b/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md index 84335bf86846..29e830c6fe82 100644 --- a/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md +++ b/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.5.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-data-exchange-v0.5.5...google-cloud-bigquery-data-exchange-v0.5.6) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [0.5.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-data-exchange-v0.5.4...google-cloud-bigquery-data-exchange-v0.5.5) (2023-08-03) diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py index 8442537e9014..707b752e07d4 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.5" # {x-release-please-version} +__version__ = "0.5.6" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py index 8442537e9014..707b752e07d4 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.5" # {x-release-please-version} +__version__ = "0.5.6" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py index ed5ed88a97d4..915ec49907ab 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py @@ -414,6 +414,7 @@ async def sample_list_org_data_exchanges(): Message for response to listing data exchanges in an organization and location. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1463,6 +1464,7 @@ async def subscribe_listing( metadata: Sequence[Tuple[str, str]] = (), ) -> dataexchange.SubscribeListingResponse: r"""Subscribes to a listing. + Currently, with Analytics Hub, you can create listings that reference only BigQuery datasets. 
Upon subscription to a listing for a BigQuery dataset, diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py index 862ed4ac6ec6..0b5f967136cd 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py @@ -675,6 +675,7 @@ def sample_list_org_data_exchanges(): Message for response to listing data exchanges in an organization and location. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1724,6 +1725,7 @@ def subscribe_listing( metadata: Sequence[Tuple[str, str]] = (), ) -> dataexchange.SubscribeListingResponse: r"""Subscribes to a listing. + Currently, with Analytics Hub, you can create listings that reference only BigQuery datasets. Upon subscription to a listing for a BigQuery dataset, diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py index aa9e77d82a8c..eea0ba3a0669 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py @@ -542,6 +542,7 @@ def subscribe_listing( r"""Return a callable for the subscribe listing method over gRPC. Subscribes to a listing. 
+ Currently, with Analytics Hub, you can create listings that reference only BigQuery datasets. Upon subscription to a listing for a BigQuery dataset, diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py index ce69fd27ccc4..09b446e48712 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py @@ -553,6 +553,7 @@ def subscribe_listing( r"""Return a callable for the subscribe listing method over gRPC. Subscribes to a listing. + Currently, with Analytics Hub, you can create listings that reference only BigQuery datasets. Upon subscription to a listing for a BigQuery dataset, diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py index 35aaf5b99060..6953534fc5c7 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py @@ -85,11 +85,11 @@ class DataExchange(proto.Message): the data exchange. icon (bytes): Optional. Base64 encoded image representing - the data exchange. Max Size: 3.0MiB - Expected image dimensions are 512x512 pixels, - however the API only performs validation on size - of the encoded data. Note: For byte fields, the - content of the fields are base64-encoded (which + the data exchange. 
Max Size: 3.0MiB Expected + image dimensions are 512x512 pixels, however the + API only performs validation on size of the + encoded data. Note: For byte fields, the content + of the fields are base64-encoded (which increases the size of the data by 33-36%) when using JSON on the wire. """ @@ -286,13 +286,13 @@ class Listing(proto.Message): Output only. Current state of the listing. icon (bytes): Optional. Base64 encoded image representing - the listing. Max Size: 3.0MiB - Expected image dimensions are 512x512 pixels, - however the API only performs validation on size - of the encoded data. Note: For byte fields, the - contents of the field are base64-encoded (which - increases the size of the data by 33-36%) when - using JSON on the wire. + the listing. Max Size: 3.0MiB Expected image + dimensions are 512x512 pixels, however the API + only performs validation on size of the encoded + data. Note: For byte fields, the contents of the + field are base64-encoded (which increases the + size of the data by 33-36%) when using JSON on + the wire. data_provider (google.cloud.bigquery_data_exchange_v1beta1.types.DataProvider): Optional. Details of the data provider who owns the source data. 
diff --git a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json index 852b26534791..8d4f2bc3aa75 100644 --- a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-data-exchange", - "version": "0.5.5" + "version": "0.5.6" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md b/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md index 131586e10e82..32d1c3cd290c 100644 --- a/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [0.6.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.6.0...google-cloud-bigquery-datapolicies-v0.6.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [0.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.5.3...google-cloud-bigquery-datapolicies-v0.6.0) (2023-08-31) + + +### Features + +* support using custom UDF in the data policy ([#11602](https://github.com/googleapis/google-cloud-python/issues/11602)) ([f1f0e58](https://github.com/googleapis/google-cloud-python/commit/f1f0e58667bbe4558b87101983cdd245fcdf71d9)) + +## [0.5.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.5.2...google-cloud-bigquery-datapolicies-v0.5.3) 
(2023-08-11) + + +### Bug Fixes + +* sync the new PredefinedExpression types to the client library ([#11567](https://github.com/googleapis/google-cloud-python/issues/11567)) ([a971664](https://github.com/googleapis/google-cloud-python/commit/a971664e43167093a7e1925dd0bed528e3e99ee1)) + ## [0.5.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.5.1...google-cloud-bigquery-datapolicies-v0.5.2) (2023-08-03) diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py index fb3dbbf711d8..d205b4deaf8e 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.2" # {x-release-please-version} +__version__ = "0.6.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py index fb3dbbf711d8..d205b4deaf8e 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.2" # {x-release-please-version} +__version__ = "0.6.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py index 32298da17c4c..a77a6790fbc0 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py @@ -254,7 +254,7 @@ async def sample_create_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.CreateDataPolicyRequest( parent="parent_value", @@ -382,7 +382,7 @@ async def sample_update_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( data_policy=data_policy, diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py index dd4353b96802..85e12cc88d18 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ 
b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py @@ -483,7 +483,7 @@ def sample_create_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.CreateDataPolicyRequest( parent="parent_value", @@ -602,7 +602,7 @@ def sample_update_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( data_policy=data_policy, diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py index 8950c60b3803..0facfefa36ad 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py @@ -167,9 +167,10 @@ class ListDataPoliciesRequest(proto.Message): associated with. Currently filter only supports "policy\_tag" based filtering and OR based predicates. Sample filter can be "policy\_tag: - ``'projects/1/locations/us/taxonomies/2/policyTags/3'``". - You may use wildcard such as "policy\_tag: - ``'projects/1/locations/us/taxonomies/2/*'``". + projects/1/locations/us/taxonomies/2/policyTags/3". You may + also use wildcard such as "policy\_tag: + projects/1/locations/us/taxonomies/2*". 
Please note that OR + predicates cannot be used with wildcard filters. """ parent: str = proto.Field( @@ -293,6 +294,10 @@ class DataMaskingPolicy(proto.Message): r"""The data masking policy that is used to specify data masking rule. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -300,6 +305,12 @@ class DataMaskingPolicy(proto.Message): predefined_expression (google.cloud.bigquery_datapolicies_v1.types.DataMaskingPolicy.PredefinedExpression): A predefined masking expression. + This field is a member of `oneof`_ ``masking_expression``. + routine (str): + The name of the BigQuery routine that contains the custom + masking routine, in the format of + ``projects/{project_number}/datasets/{dataset_id}/routines/{routine_id}``. + This field is a member of `oneof`_ ``masking_expression``. """ @@ -329,20 +340,62 @@ class PredefinedExpression(proto.Enum): - FLOAT: 0.0 - NUMERIC: 0 - BOOLEAN: FALSE - - TIMESTAMP: 0001-01-01 00:00:00 UTC - - DATE: 0001-01-01 + - TIMESTAMP: 1970-01-01 00:00:00 UTC + - DATE: 1970-01-01 - TIME: 00:00:00 - - DATETIME: 0001-01-01T00:00:00 + - DATETIME: 1970-01-01T00:00:00 - GEOGRAPHY: POINT(0 0) - BIGNUMERIC: 0 - ARRAY: [] - STRUCT: NOT_APPLICABLE - JSON: NULL + LAST_FOUR_CHARACTERS (9): + Masking expression shows the last four characters of text. + The masking behavior is as follows: + + - If text length > 4 characters: Replace text with XXXXX, + append last four characters of original text. + - If text length <= 4 characters: Apply SHA-256 hash. + FIRST_FOUR_CHARACTERS (10): + Masking expression shows the first four characters of text. 
+ The masking behavior is as follows: + + - If text length > 4 characters: Replace text with XXXXX, + prepend first four characters of original text. + - If text length <= 4 characters: Apply SHA-256 hash. + EMAIL_MASK (12): + Masking expression for email addresses. The masking behavior + is as follows: + + - Syntax-valid email address: Replace username with XXXXX. + For example, cloudysanfrancisco@gmail.com becomes + XXXXX@gmail.com. + - Syntax-invalid email address: Apply SHA-256 hash. + + For more information, see `Email + mask `__. + DATE_YEAR_MASK (13): + Masking expression to only show the year of ``Date``, + ``DateTime`` and ``TimeStamp``. For example, with the year + 2076: + + - DATE : 2076-01-01 + - DATETIME : 2076-01-01T00:00:00 + - TIMESTAMP : 2076-01-01 00:00:00 UTC + + Truncation occurs according to the UTC time zone. To change + this, adjust the default time zone using the ``time_zone`` + system variable. For more information, see the System + variables reference. """ PREDEFINED_EXPRESSION_UNSPECIFIED = 0 SHA256 = 3 ALWAYS_NULL = 5 DEFAULT_MASKING_VALUE = 7 + LAST_FOUR_CHARACTERS = 9 + FIRST_FOUR_CHARACTERS = 10 + EMAIL_MASK = 12 + DATE_YEAR_MASK = 13 predefined_expression: PredefinedExpression = proto.Field( proto.ENUM, @@ -350,6 +403,11 @@ class PredefinedExpression(proto.Enum): oneof="masking_expression", enum=PredefinedExpression, ) + routine: str = proto.Field( + proto.STRING, + number=3, + oneof="masking_expression", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py index fb3dbbf711d8..d205b4deaf8e 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py @@ 
-13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.2" # {x-release-please-version} +__version__ = "0.6.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py index c06a04b7e9f2..d4e55a95dcd6 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py @@ -41,7 +41,7 @@ async def sample_create_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.CreateDataPolicyRequest( parent="parent_value", diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py index 8a7834eae517..75ddb2ac736a 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py @@ -41,7 +41,7 @@ 
def sample_create_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.CreateDataPolicyRequest( parent="parent_value", diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py index d31ac782adb6..cdfcbb8386b4 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py @@ -41,7 +41,7 @@ async def sample_update_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( data_policy=data_policy, diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py index ade4014cbf98..c3b1d4bdc61c 100644 --- 
a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py @@ -41,7 +41,7 @@ def sample_update_data_policy(): # Initialize request argument(s) data_policy = bigquery_datapolicies_v1.DataPolicy() data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" + data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( data_policy=data_policy, diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json index fc95bf000f42..eb5ea25c0c21 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.5.2" + "version": "0.6.1" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json index 1466ff390e66..c5373a33336b 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ 
b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.5.2" + "version": "0.6.1" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py index 271130e4b75b..978dfa549317 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py @@ -3014,7 +3014,7 @@ def test_create_data_policy_rest(request_type): request_init = {"parent": "projects/sample1/locations/sample2"} request_init["data_policy"] = { "policy_tag": "policy_tag_value", - "data_masking_policy": {"predefined_expression": 3}, + "data_masking_policy": {"predefined_expression": 3, "routine": "routine_value"}, "name": "name_value", "data_policy_type": 3, "data_policy_id": "data_policy_id_value", @@ -3215,7 +3215,7 @@ def test_create_data_policy_rest_bad_request( request_init = {"parent": "projects/sample1/locations/sample2"} request_init["data_policy"] = { "policy_tag": "policy_tag_value", - "data_masking_policy": {"predefined_expression": 3}, + "data_masking_policy": {"predefined_expression": 3, "routine": "routine_value"}, "name": "name_value", "data_policy_type": 3, "data_policy_id": "data_policy_id_value", @@ -3319,7 +3319,7 @@ def test_update_data_policy_rest(request_type): } request_init["data_policy"] = { "policy_tag": "policy_tag_value", - "data_masking_policy": {"predefined_expression": 3}, + "data_masking_policy": {"predefined_expression": 3, "routine": "routine_value"}, "name": "projects/sample1/locations/sample2/dataPolicies/sample3", 
"data_policy_type": 3, "data_policy_id": "data_policy_id_value", @@ -3513,7 +3513,7 @@ def test_update_data_policy_rest_bad_request( } request_init["data_policy"] = { "policy_tag": "policy_tag_value", - "data_masking_policy": {"predefined_expression": 3}, + "data_masking_policy": {"predefined_expression": 3, "routine": "routine_value"}, "name": "projects/sample1/locations/sample2/dataPolicies/sample3", "data_policy_type": 3, "data_policy_id": "data_policy_id_value", diff --git a/packages/google-cloud-bigquery-logging/.flake8 b/packages/google-cloud-bigquery-logging/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-bigquery-logging/.flake8 +++ b/packages/google-cloud-bigquery-logging/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bigquery-logging/CHANGELOG.md b/packages/google-cloud-bigquery-logging/CHANGELOG.md index 74d96d7da7dd..e54641437d67 100644 --- a/packages/google-cloud-bigquery-logging/CHANGELOG.md +++ b/packages/google-cloud-bigquery-logging/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-logging-v1.2.2...google-cloud-bigquery-logging-v1.3.0) (2023-09-19) + + +### Features + +* add the name of the reservation the job was submitted to as a field ([#11643](https://github.com/googleapis/google-cloud-python/issues/11643)) ([b736f9a](https://github.com/googleapis/google-cloud-python/commit/b736f9a2440cad0cb01538b415ab034d908c4716)) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## 
[1.2.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-logging-v1.2.1...google-cloud-bigquery-logging-v1.2.2) (2023-07-05) diff --git a/packages/google-cloud-bigquery-logging/CONTRIBUTING.rst b/packages/google-cloud-bigquery-logging/CONTRIBUTING.rst index 2eb31b6cd01e..fd48e66dd5bd 100644 --- a/packages/google-cloud-bigquery-logging/CONTRIBUTING.rst +++ b/packages/google-cloud-bigquery-logging/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bigquery-logging/noxfile.py ********** diff --git a/packages/google-cloud-bigquery-logging/MANIFEST.in b/packages/google-cloud-bigquery-logging/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-bigquery-logging/MANIFEST.in +++ b/packages/google-cloud-bigquery-logging/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bigquery-logging/README.rst b/packages/google-cloud-bigquery-logging/README.rst index 6ad392ce795d..efa9f0d1030c 100644 --- a/packages/google-cloud-bigquery-logging/README.rst +++ b/packages/google-cloud-bigquery-logging/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. 
These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-bigquery-logging + pip install google-cloud-bigquery-logging Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-bigquery-logging + py -m venv + .\\Scripts\activate + pip install google-cloud-bigquery-logging Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-logging/docs/conf.py b/packages/google-cloud-bigquery-logging/docs/conf.py index 4f3f9ea41566..2c8ebb4f7797 100644 --- a/packages/google-cloud-bigquery-logging/docs/conf.py +++ b/packages/google-cloud-bigquery-logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py index 360a0d13ebdd..5f7437f6f8d0 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py index 360a0d13ebdd..5f7437f6f8d0 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/types/audit_data.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/types/audit_data.py index 57215debefa8..30f4defb30e9 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/types/audit_data.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/types/audit_data.py @@ -1291,6 +1291,9 @@ class JobStatistics(proto.Message): populated. Aggregate usage of all jobs submitted to a reservation should provide a more reliable indicator of reservation imbalance. 
+ reservation (str): + Reservation name or "unreserved" for + on-demand resource usage. referenced_tables (MutableSequence[google.cloud.bigquery_logging_v1.types.TableName]): The first N tables accessed by the query job. Older queries that reference a large number of tables may not have all of @@ -1375,6 +1378,10 @@ class ReservationResourceUsage(proto.Message): number=14, message=ReservationResourceUsage, ) + reservation: str = proto.Field( + proto.STRING, + number=16, + ) referenced_tables: MutableSequence["TableName"] = proto.RepeatedField( proto.MESSAGE, number=9, diff --git a/packages/google-cloud-bigquery-logging/noxfile.py b/packages/google-cloud-bigquery-logging/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-bigquery-logging/noxfile.py +++ b/packages/google-cloud-bigquery-logging/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-bigquery-logging/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-logging/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-bigquery-logging/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-bigquery-logging/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-bigquery-migration/.OwlBot.yaml b/packages/google-cloud-bigquery-migration/.OwlBot.yaml new file mode 100644 index 000000000000..32f73a0c8926 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/.OwlBot.yaml @@ -0,0 +1,23 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/bigquery/migration/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-bigquery-migration/$1 + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/google-cloud-bigquery-migration/.coveragerc b/packages/google-cloud-bigquery-migration/.coveragerc new file mode 100644 index 000000000000..c16fcf7df5d3 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bigquery_migration/__init__.py + google/cloud/bigquery_migration/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-bigquery-migration/.flake8 b/packages/google-cloud-bigquery-migration/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-bigquery-migration/.gitignore b/packages/google-cloud-bigquery-migration/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-bigquery-migration/.repo-metadata.json b/packages/google-cloud-bigquery-migration/.repo-metadata.json new file mode 100644 index 000000000000..451dadee7982 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "bigquerymigration", + "name_pretty": "Google BigQuery Migration", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/migration/", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerymigration/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-bigquery-migration", + "api_id": "bigquerymigration.googleapis.com", + "requires_billing": true, + "default_version": "v2", + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigquerymigration" +} diff --git a/packages/google-cloud-bigquery-migration/CHANGELOG.md b/packages/google-cloud-bigquery-migration/CHANGELOG.md new file mode 100644 index 000000000000..2b13ef526bc8 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/CHANGELOG.md @@ -0,0 +1,221 @@ +# Changelog + +## [0.11.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-migration-v0.11.1...google-cloud-bigquery-migration-v0.11.2) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [0.11.1](https://github.com/googleapis/python-bigquery-migration/compare/v0.11.0...v0.11.1) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#191](https://github.com/googleapis/python-bigquery-migration/issues/191)) 
([d14439c](https://github.com/googleapis/python-bigquery-migration/commit/d14439c44c58a274e2bdfb6cde1d0883baee1a3c)) + +## [0.11.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.10.0...v0.11.0) (2023-03-23) + + +### Features + +* Add `request_source` field to `TranslationConfigDetails` ([d11aa1f](https://github.com/googleapis/python-bigquery-migration/commit/d11aa1fdb4a7d9c7e0bb270ad5b8306a91485818)) +* Add PENDING_DEPENDENCY to `State` enum of `MigrationSubtask` ([d11aa1f](https://github.com/googleapis/python-bigquery-migration/commit/d11aa1fdb4a7d9c7e0bb270ad5b8306a91485818)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#177](https://github.com/googleapis/python-bigquery-migration/issues/177)) ([699d68c](https://github.com/googleapis/python-bigquery-migration/commit/699d68cfddad624609ec1913fbbe1992d83d0806)) + +## [0.10.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.9.1...v0.10.0) (2023-03-01) + + +### Features + +* **v2alpha:** Add SQL translation service ([#170](https://github.com/googleapis/python-bigquery-migration/issues/170)) ([533d1d8](https://github.com/googleapis/python-bigquery-migration/commit/533d1d87e3bfb917488550a979b35994ebe293ed)) + +## [0.9.1](https://github.com/googleapis/python-bigquery-migration/compare/v0.9.0...v0.9.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([970281e](https://github.com/googleapis/python-bigquery-migration/commit/970281ecea66632b651f8346166509e0e7160168)) + + +### Documentation + +* Add documentation for enums ([970281e](https://github.com/googleapis/python-bigquery-migration/commit/970281ecea66632b651f8346166509e0e7160168)) + +## [0.9.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.8.0...v0.9.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#157](https://github.com/googleapis/python-bigquery-migration/issues/157)) 
([d9b27e4](https://github.com/googleapis/python-bigquery-migration/commit/d9b27e4519d47b5cc95a04336100f7fcaf7235c7)) + +## [0.8.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.7.2...v0.8.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.bigquery_migration.__version__` ([8f180ec](https://github.com/googleapis/python-bigquery-migration/commit/8f180ec2c2dec24d73982d5af2f9af8f8f7dfa8f)) +* Add typing to proto.Message based class attributes ([8f180ec](https://github.com/googleapis/python-bigquery-migration/commit/8f180ec2c2dec24d73982d5af2f9af8f8f7dfa8f)) + + +### Bug Fixes + +* Add dict typing for client_options ([8f180ec](https://github.com/googleapis/python-bigquery-migration/commit/8f180ec2c2dec24d73982d5af2f9af8f8f7dfa8f)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([61bc618](https://github.com/googleapis/python-bigquery-migration/commit/61bc618abf8ced62fea472b9ceb25a6c5fe24e6d)) +* Drop usage of pkg_resources ([61bc618](https://github.com/googleapis/python-bigquery-migration/commit/61bc618abf8ced62fea472b9ceb25a6c5fe24e6d)) +* Fix timeout default values ([61bc618](https://github.com/googleapis/python-bigquery-migration/commit/61bc618abf8ced62fea472b9ceb25a6c5fe24e6d)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([8f180ec](https://github.com/googleapis/python-bigquery-migration/commit/8f180ec2c2dec24d73982d5af2f9af8f8f7dfa8f)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([61bc618](https://github.com/googleapis/python-bigquery-migration/commit/61bc618abf8ced62fea472b9ceb25a6c5fe24e6d)) + +## [0.7.2](https://github.com/googleapis/python-bigquery-migration/compare/v0.7.1...v0.7.2) (2022-10-08) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#139](https://github.com/googleapis/python-bigquery-migration/issues/139)) 
([5258fa6](https://github.com/googleapis/python-bigquery-migration/commit/5258fa6112f7f47ec0b3351f45dd90f18eaa5181)) + +## [0.7.1](https://github.com/googleapis/python-bigquery-migration/compare/v0.7.0...v0.7.1) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#136](https://github.com/googleapis/python-bigquery-migration/issues/136)) ([f69ed02](https://github.com/googleapis/python-bigquery-migration/commit/f69ed025709bf30d4dca51d09a9fd9b3a3d964cd)) + +## [0.7.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.6.0...v0.7.0) (2022-08-11) + + +### Features + +* **v2:** Add MySQL dialect ([#116](https://github.com/googleapis/python-bigquery-migration/issues/116)) ([9f66ca4](https://github.com/googleapis/python-bigquery-migration/commit/9f66ca4c1e8b1438a8f11919c24fb6506a5790ea)) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#118](https://github.com/googleapis/python-bigquery-migration/issues/118)) ([b217543](https://github.com/googleapis/python-bigquery-migration/commit/b2175431fd5b2391f9490a5900977c5768d6712a)) +* **deps:** require proto-plus >= 1.22.0 ([b217543](https://github.com/googleapis/python-bigquery-migration/commit/b2175431fd5b2391f9490a5900977c5768d6712a)) + +## [0.6.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.5.0...v0.6.0) (2022-07-16) + + +### Features + +* add audience parameter ([77e4c52](https://github.com/googleapis/python-bigquery-migration/commit/77e4c52d597e62956e2349dfff2f1b88df013450)) +* Add Presto and Postgresql dialects ([#103](https://github.com/googleapis/python-bigquery-migration/issues/103)) ([77e4c52](https://github.com/googleapis/python-bigquery-migration/commit/77e4c52d597e62956e2349dfff2f1b88df013450)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([77e4c52](https://github.com/googleapis/python-bigquery-migration/commit/77e4c52d597e62956e2349dfff2f1b88df013450)) +* require python 3.7+ 
([#105](https://github.com/googleapis/python-bigquery-migration/issues/105)) ([13f50a7](https://github.com/googleapis/python-bigquery-migration/commit/13f50a7b845f5d4ce0aa03821578b730ea37dbb2)) + +## [0.5.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.4.3...v0.5.0) (2022-06-15) + + +### Features + +* Add SQL Server dialect to bigquerymigration v2 client library ([#99](https://github.com/googleapis/python-bigquery-migration/issues/99)) ([35a1099](https://github.com/googleapis/python-bigquery-migration/commit/35a10990d6d9019511d9c1813f4f6d5889004189)) + +## [0.4.3](https://github.com/googleapis/python-bigquery-migration/compare/v0.4.2...v0.4.3) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#95](https://github.com/googleapis/python-bigquery-migration/issues/95)) ([0b93e8d](https://github.com/googleapis/python-bigquery-migration/commit/0b93e8d0332269cf837d146106d8d5fbdcd42cc4)) + + +### Documentation + +* fix changelog header to consistent size ([#96](https://github.com/googleapis/python-bigquery-migration/issues/96)) ([e95f0fc](https://github.com/googleapis/python-bigquery-migration/commit/e95f0fcad0f6c3366ade1637b51295a89d2bc1b2)) + +## [0.4.2](https://github.com/googleapis/python-bigquery-migration/compare/v0.4.1...v0.4.2) (2022-05-22) + + +### Documentation + +* **samples:** add create_migration_workflow snippet ([#71](https://github.com/googleapis/python-bigquery-migration/issues/71)) ([761b5f5](https://github.com/googleapis/python-bigquery-migration/commit/761b5f5045edbe8c81a31f501bf3b14de7dffe20)) + +## [0.4.1](https://github.com/googleapis/python-bigquery-migration/compare/v0.4.0...v0.4.1) (2022-05-05) + + +### Documentation + +* fix type in docstring for map fields ([6c5982b](https://github.com/googleapis/python-bigquery-migration/commit/6c5982bf2fcc0d90d6a6951a7dd676e9b7974627)) + +## [0.4.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.3.1...v0.4.0) (2022-04-03) + + +### Features + +* 
Add bigquery_migration v2 client library ([#54](https://github.com/googleapis/python-bigquery-migration/issues/54)) ([776ea61](https://github.com/googleapis/python-bigquery-migration/commit/776ea6189f6a94c5daa5af6b4fa7e0e3b21015ec)) + + +### Bug Fixes + +* set bigquery_migration_v2 as the default import ([776ea61](https://github.com/googleapis/python-bigquery-migration/commit/776ea6189f6a94c5daa5af6b4fa7e0e3b21015ec)) + +## [0.3.1](https://github.com/googleapis/python-bigquery-migration/compare/v0.3.0...v0.3.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#48](https://github.com/googleapis/python-bigquery-migration/issues/48)) ([2d8b0b5](https://github.com/googleapis/python-bigquery-migration/commit/2d8b0b5845573d2784b6bfa925285c6ddac5c1e7)) + +## [0.3.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.2.1...v0.3.0) (2022-02-14) + + +### Features + +* add api key support ([#34](https://github.com/googleapis/python-bigquery-migration/issues/34)) ([124de81](https://github.com/googleapis/python-bigquery-migration/commit/124de81e97b39694433820678704b3f6079ce1e2)) +* Add task details and orchestration result details ([#32](https://github.com/googleapis/python-bigquery-migration/issues/32)) ([44c10e1](https://github.com/googleapis/python-bigquery-migration/commit/44c10e17767135b7a5c9a5e22b82260be75459b1)) + + +### Bug Fixes + +* **deps:** remove unused dependency libcst ([#39](https://github.com/googleapis/python-bigquery-migration/issues/39)) ([496abc7](https://github.com/googleapis/python-bigquery-migration/commit/496abc7854985c6f1bfd8463330f2f07a0f3048c)) +* resolve DuplicateCredentialArgs error when using credentials_file ([b3b1ee2](https://github.com/googleapis/python-bigquery-migration/commit/b3b1ee2c0075adadedeef28a5853a440fc1e6535)) + + +### Documentation + +* add generated snippets ([#38](https://github.com/googleapis/python-bigquery-migration/issues/38)) 
([13b7ac7](https://github.com/googleapis/python-bigquery-migration/commit/13b7ac71ace1cc226d6fa5b43dde345c3ac3e489)) + +## [0.2.1](https://www.github.com/googleapis/python-bigquery-migration/compare/v0.2.0...v0.2.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([70ec0c5](https://www.github.com/googleapis/python-bigquery-migration/commit/70ec0c5da7cf18ed632bfb19c5f2d6bfb8d2334a)) +* **deps:** require google-api-core >= 1.28.0 ([70ec0c5](https://www.github.com/googleapis/python-bigquery-migration/commit/70ec0c5da7cf18ed632bfb19c5f2d6bfb8d2334a)) + + +### Documentation + +* list oneofs in docstring ([70ec0c5](https://www.github.com/googleapis/python-bigquery-migration/commit/70ec0c5da7cf18ed632bfb19c5f2d6bfb8d2334a)) + +## [0.2.0](https://www.github.com/googleapis/python-bigquery-migration/compare/v0.1.0...v0.2.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#8](https://www.github.com/googleapis/python-bigquery-migration/issues/8)) ([d9dbb32](https://www.github.com/googleapis/python-bigquery-migration/commit/d9dbb32edeffee1e559f979300713a4a10cd9806)) + +## 0.1.0 (2021-09-30) + + +### Features + +* generate v2alpha ([ccb7ca8](https://www.github.com/googleapis/python-bigquery-migration/commit/ccb7ca8473252da1eeab1ba4338c65a5ff0e6e8e)) + + +### Bug Fixes + +* address testing failures from initial generation ([#1](https://www.github.com/googleapis/python-bigquery-migration/issues/1)) ([85284a7](https://www.github.com/googleapis/python-bigquery-migration/commit/85284a7f1f7ed39cd2de61ecae5ed40656283533)) +* correct python namespace for migration API ([#3](https://www.github.com/googleapis/python-bigquery-migration/issues/3)) ([3dda870](https://www.github.com/googleapis/python-bigquery-migration/commit/3dda8702d54ee39897322215c2a551921356ae61)) diff --git a/packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md b/packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md new file mode 100644 index 
000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. 
They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/CONTRIBUTING.rst b/packages/google-cloud-bigquery-migration/CONTRIBUTING.rst new file mode 100644 index 000000000000..39799da1a15e --- /dev/null +++ b/packages/google-cloud-bigquery-migration/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+ +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. 
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). 
+ +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery-migration + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bigquery-migration/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-bigquery-migration/LICENSE b/packages/google-cloud-bigquery-migration/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-bigquery-migration/MANIFEST.in b/packages/google-cloud-bigquery-migration/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-bigquery-migration/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-bigquery-migration/README.rst b/packages/google-cloud-bigquery-migration/README.rst new file mode 100644 index 000000000000..64280d74e5b2 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/README.rst @@ -0,0 +1,108 @@ +Python Client for Google BigQuery Migration +=========================================== + +|preview| |pypi| |versions| + +`Google BigQuery Migration`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-migration.svg + :target: https://pypi.org/project/google-cloud-bigquery-migration/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-migration.svg + :target: https://pypi.org/project/google-cloud-bigquery-migration/ +.. _Google BigQuery Migration: https://cloud.google.com/bigquery/docs/reference/migration/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigquerymigration/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/migration/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google BigQuery Migration.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Enable the Google BigQuery Migration.: https://cloud.google.com/bigquery/docs/reference/migration/ +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-bigquery-migration + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-bigquery-migration + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google BigQuery Migration + to see other available methods on the client. +- Read the `Google BigQuery Migration Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google BigQuery Migration Product documentation: https://cloud.google.com/bigquery/docs/reference/migration/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-bigquery-migration/SECURITY.md b/packages/google-cloud-bigquery-migration/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-bigquery-migration/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
diff --git a/packages/google-cloud-bigquery-migration/bigquery-migration-v2alpha-py.tar.gz b/packages/google-cloud-bigquery-migration/bigquery-migration-v2alpha-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-bigquery-migration/docs/CHANGELOG.md b/packages/google-cloud-bigquery-migration/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/docs/README.rst b/packages/google-cloud-bigquery-migration/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/docs/_static/custom.css b/packages/google-cloud-bigquery-migration/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-bigquery-migration/docs/_templates/layout.html b/packages/google-cloud-bigquery-migration/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/migration_service.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/migration_service.rst new file mode 100644 index 000000000000..513891e3e8c9 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/migration_service.rst @@ -0,0 +1,10 @@ +MigrationService +---------------------------------- + +.. automodule:: google.cloud.bigquery_migration_v2.services.migration_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery_migration_v2.services.migration_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/services.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/services.rst new file mode 100644 index 000000000000..74ebb7ac08f8 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Migration v2 API +=================================================== +.. toctree:: + :maxdepth: 2 + + migration_service diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/types.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/types.rst new file mode 100644 index 000000000000..60d74a5de948 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Migration v2 API +================================================ + +.. 
automodule:: google.cloud.bigquery_migration_v2.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/migration_service.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/migration_service.rst new file mode 100644 index 000000000000..e676817164a0 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/migration_service.rst @@ -0,0 +1,10 @@ +MigrationService +---------------------------------- + +.. automodule:: google.cloud.bigquery_migration_v2alpha.services.migration_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/services.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/services.rst new file mode 100644 index 000000000000..717bf41602e1 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Bigquery Migration v2alpha API +======================================================== +.. toctree:: + :maxdepth: 2 + + migration_service + sql_translation_service diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/sql_translation_service.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/sql_translation_service.rst new file mode 100644 index 000000000000..4d89dd21f8e2 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/sql_translation_service.rst @@ -0,0 +1,6 @@ +SqlTranslationService +--------------------------------------- + +.. 
automodule:: google.cloud.bigquery_migration_v2alpha.services.sql_translation_service + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/types.rst b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/types.rst new file mode 100644 index 000000000000..0260c71e2abb --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/bigquery_migration_v2alpha/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Migration v2alpha API +===================================================== + +.. automodule:: google.cloud.bigquery_migration_v2alpha.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-migration/docs/conf.py b/packages/google-cloud-bigquery-migration/docs/conf.py new file mode 100644 index 000000000000..5b35bb5a67f0 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-bigquery-migration documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-bigquery-migration" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. 
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-bigquery-migration", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-bigquery-migration-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-bigquery-migration.tex", + "google-cloud-bigquery-migration Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-bigquery-migration", + "google-cloud-bigquery-migration Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-bigquery-migration", + "google-cloud-bigquery-migration Documentation", + author, + "google-cloud-bigquery-migration", + "google-cloud-bigquery-migration Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-bigquery-migration/docs/index.rst b/packages/google-cloud-bigquery-migration/docs/index.rst new file mode 100644 index 000000000000..2573a40c417f --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Google BigQuery Migration. +By default, you will get version ``bigquery_migration_v2``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + bigquery_migration_v2/services + bigquery_migration_v2/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + bigquery_migration_v2alpha/services + bigquery_migration_v2alpha/types + + +Changelog +--------- + +For a list of all ``google-cloud-bigquery-migration`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-bigquery-migration/docs/multiprocessing.rst b/packages/google-cloud-bigquery-migration/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/__init__.py new file mode 100644 index 000000000000..1698cd4b0aaf --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/__init__.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.bigquery_migration import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.bigquery_migration_v2.services.migration_service.async_client import ( + MigrationServiceAsyncClient, +) +from google.cloud.bigquery_migration_v2.services.migration_service.client import ( + MigrationServiceClient, +) +from google.cloud.bigquery_migration_v2.types.migration_entities import ( + MigrationSubtask, + MigrationTask, + MigrationWorkflow, +) +from google.cloud.bigquery_migration_v2.types.migration_error_details import ( + ErrorDetail, + ErrorLocation, + ResourceErrorDetail, +) +from google.cloud.bigquery_migration_v2.types.migration_metrics import ( + Point, + TimeInterval, + TimeSeries, + TypedValue, +) +from google.cloud.bigquery_migration_v2.types.migration_service import ( + CreateMigrationWorkflowRequest, + DeleteMigrationWorkflowRequest, + GetMigrationSubtaskRequest, + GetMigrationWorkflowRequest, + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + StartMigrationWorkflowRequest, +) +from google.cloud.bigquery_migration_v2.types.translation_config import ( + AzureSynapseDialect, + BigQueryDialect, + Dialect, + HiveQLDialect, + MySQLDialect, + NameMappingKey, + NameMappingValue, + NetezzaDialect, + ObjectNameMapping, + ObjectNameMappingList, + OracleDialect, + PostgresqlDialect, + PrestoDialect, + RedshiftDialect, + SnowflakeDialect, + SourceEnv, + SparkSQLDialect, + SQLServerDialect, + TeradataDialect, + TranslationConfigDetails, + VerticaDialect, +) + +__all__ = ( + "MigrationServiceClient", + "MigrationServiceAsyncClient", + "MigrationSubtask", + "MigrationTask", + "MigrationWorkflow", + "ErrorDetail", + "ErrorLocation", + "ResourceErrorDetail", + "Point", + "TimeInterval", + "TimeSeries", + "TypedValue", + "CreateMigrationWorkflowRequest", + "DeleteMigrationWorkflowRequest", + "GetMigrationSubtaskRequest", + 
"GetMigrationWorkflowRequest", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + "ListMigrationWorkflowsRequest", + "ListMigrationWorkflowsResponse", + "StartMigrationWorkflowRequest", + "AzureSynapseDialect", + "BigQueryDialect", + "Dialect", + "HiveQLDialect", + "MySQLDialect", + "NameMappingKey", + "NameMappingValue", + "NetezzaDialect", + "ObjectNameMapping", + "ObjectNameMappingList", + "OracleDialect", + "PostgresqlDialect", + "PrestoDialect", + "RedshiftDialect", + "SnowflakeDialect", + "SourceEnv", + "SparkSQLDialect", + "SQLServerDialect", + "TeradataDialect", + "TranslationConfigDetails", + "VerticaDialect", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py new file mode 100644 index 000000000000..c48c3cf53d40 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/py.typed b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/py.typed new file mode 100644 index 000000000000..fcbd0b586dd2 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-migration package uses inline types. diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/__init__.py new file mode 100644 index 000000000000..8b975fa832aa --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/__init__.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.bigquery_migration_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.migration_service import ( + MigrationServiceAsyncClient, + MigrationServiceClient, +) +from .types.migration_entities import MigrationSubtask, MigrationTask, MigrationWorkflow +from .types.migration_error_details import ( + ErrorDetail, + ErrorLocation, + ResourceErrorDetail, +) +from .types.migration_metrics import Point, TimeInterval, TimeSeries, TypedValue +from .types.migration_service import ( + CreateMigrationWorkflowRequest, + DeleteMigrationWorkflowRequest, + GetMigrationSubtaskRequest, + GetMigrationWorkflowRequest, + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + StartMigrationWorkflowRequest, +) +from .types.translation_config import ( + AzureSynapseDialect, + BigQueryDialect, + Dialect, + HiveQLDialect, + MySQLDialect, + NameMappingKey, + NameMappingValue, + NetezzaDialect, + ObjectNameMapping, + ObjectNameMappingList, + OracleDialect, + PostgresqlDialect, + PrestoDialect, + RedshiftDialect, + SnowflakeDialect, + SourceEnv, + SparkSQLDialect, + SQLServerDialect, + TeradataDialect, + TranslationConfigDetails, + VerticaDialect, +) + +__all__ = ( + "MigrationServiceAsyncClient", + "AzureSynapseDialect", + "BigQueryDialect", + "CreateMigrationWorkflowRequest", + "DeleteMigrationWorkflowRequest", + "Dialect", + "ErrorDetail", + "ErrorLocation", + "GetMigrationSubtaskRequest", + "GetMigrationWorkflowRequest", + "HiveQLDialect", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + "ListMigrationWorkflowsRequest", + "ListMigrationWorkflowsResponse", + "MigrationServiceClient", + "MigrationSubtask", + "MigrationTask", + "MigrationWorkflow", + "MySQLDialect", + "NameMappingKey", + "NameMappingValue", + "NetezzaDialect", + "ObjectNameMapping", + "ObjectNameMappingList", + "OracleDialect", + "Point", + 
"PostgresqlDialect", + "PrestoDialect", + "RedshiftDialect", + "ResourceErrorDetail", + "SQLServerDialect", + "SnowflakeDialect", + "SourceEnv", + "SparkSQLDialect", + "StartMigrationWorkflowRequest", + "TeradataDialect", + "TimeInterval", + "TimeSeries", + "TranslationConfigDetails", + "TypedValue", + "VerticaDialect", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_metadata.json b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_metadata.json new file mode 100644 index 000000000000..f841186e6e8d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_metadata.json @@ -0,0 +1,93 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_migration_v2", + "protoPackage": "google.cloud.bigquery.migration.v2", + "schema": "1.0", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "create_migration_workflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "delete_migration_workflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "get_migration_subtask" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "get_migration_workflow" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "list_migration_subtasks" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "list_migration_workflows" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "start_migration_workflow" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MigrationServiceAsyncClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "create_migration_workflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "delete_migration_workflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "get_migration_subtask" 
+ ] + }, + "GetMigrationWorkflow": { + "methods": [ + "get_migration_workflow" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "list_migration_subtasks" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "list_migration_workflows" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "start_migration_workflow" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py new file mode 100644 index 000000000000..c48c3cf53d40 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/py.typed b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/py.typed new file mode 100644 index 000000000000..fcbd0b586dd2 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-migration package uses inline types. 
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/__init__.py new file mode 100644 index 000000000000..c56e03eca2b3 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MigrationServiceAsyncClient +from .client import MigrationServiceClient + +__all__ = ( + "MigrationServiceClient", + "MigrationServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py new file mode 100644 index 000000000000..d9ad8c3c5049 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py @@ -0,0 +1,1026 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore + +from google.cloud.bigquery_migration_v2.services.migration_service import pagers +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_error_details, + migration_metrics, + migration_service, +) + +from .client import MigrationServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MigrationServiceTransport +from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport + + +class MigrationServiceAsyncClient: + """Service to handle EDW migrations.""" + + _client: MigrationServiceClient + + DEFAULT_ENDPOINT = MigrationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT + + migration_subtask_path = staticmethod(MigrationServiceClient.migration_subtask_path) + parse_migration_subtask_path = staticmethod( + MigrationServiceClient.parse_migration_subtask_path + ) + migration_workflow_path = staticmethod( + MigrationServiceClient.migration_workflow_path + ) + parse_migration_workflow_path = staticmethod( + MigrationServiceClient.parse_migration_workflow_path + ) + 
common_billing_account_path = staticmethod( + MigrationServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MigrationServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MigrationServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + MigrationServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MigrationServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MigrationServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(MigrationServiceClient.common_project_path) + parse_common_project_path = staticmethod( + MigrationServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(MigrationServiceClient.common_location_path) + parse_common_location_path = staticmethod( + MigrationServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceAsyncClient: The constructed client. + """ + return MigrationServiceClient.from_service_account_info.__func__(MigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceAsyncClient: The constructed client. 
+ """ + return MigrationServiceClient.from_service_account_file.__func__(MigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MigrationServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MigrationServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MigrationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MigrationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_migration_workflow( + self, + request: Optional[ + Union[migration_service.CreateMigrationWorkflowRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Creates a migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest, dict]]): + The request object. Request to create a migration + workflow resource. + parent (:class:`str`): + Required. The name of the project to which this + migration workflow belongs. 
Example: + ``projects/foo/locations/bar`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_workflow (:class:`google.cloud.bigquery_migration_v2.types.MigrationWorkflow`): + Required. The migration workflow to + create. + + This corresponds to the ``migration_workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, migration_workflow]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.CreateMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_workflow is not None: + request.migration_workflow = migration_workflow + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_migration_workflow, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_migration_workflow( + self, + request: Optional[ + Union[migration_service.GetMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Gets a previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest, dict]]): + The request object. A request to get a previously created + migration workflow. + name (:class:`str`): + Required. The unique identifier for the migration + workflow. 
Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.GetMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_migration_workflows( + self, + request: Optional[ + Union[migration_service.ListMigrationWorkflowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationWorkflowsAsyncPager: + r"""Lists previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest, dict]]): + The request object. A request to list previously created + migration workflows. + parent (:class:`str`): + Required. The project and location of the migration + workflows to list. Example: + ``projects/123/locations/us`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager: + Response object for a ListMigrationWorkflows call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.ListMigrationWorkflowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_migration_workflows, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListMigrationWorkflowsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_migration_workflow( + self, + request: Optional[ + Union[migration_service.DeleteMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a migration workflow by name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.delete_migration_workflow(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest, dict]]): + The request object. A request to delete a previously + created migration workflow. + name (:class:`str`): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.DeleteMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_migration_workflow, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def start_migration_workflow( + self, + request: Optional[ + Union[migration_service.StartMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.start_migration_workflow(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest, dict]]): + The request object. A request to start a previously + created migration workflow. + name (:class:`str`): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = migration_service.StartMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_migration_subtask( + self, + request: Optional[ + Union[migration_service.GetMigrationSubtaskRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationSubtask: + r"""Gets a previously created migration subtask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_subtask(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest, dict]]): + The request object. A request to get a previously created + migration subtasks. + name (:class:`str`): + Required. The unique identifier for the migration + subtask. Example: + ``projects/123/locations/us/workflows/1234/subtasks/543`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.types.MigrationSubtask: + A subtask for a migration which + carries details about the configuration + of the subtask. The content of the + details should not matter to the end + user, but is a contract between the + subtask creator and subtask worker. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.GetMigrationSubtaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_migration_subtask, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_migration_subtasks( + self, + request: Optional[ + Union[migration_service.ListMigrationSubtasksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationSubtasksAsyncPager: + r"""Lists previously created migration subtasks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + async def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest, dict]]): + The request object. A request to list previously created + migration subtasks. + parent (:class:`str`): + Required. The migration task of the subtasks to list. + Example: ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksAsyncPager: + Response object for a ListMigrationSubtasks call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.ListMigrationSubtasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_migration_subtasks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMigrationSubtasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "MigrationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MigrationServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py new file mode 100644 index 000000000000..5fb222ba2eef --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py @@ -0,0 +1,1253 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore + +from google.cloud.bigquery_migration_v2.services.migration_service import pagers +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_error_details, + migration_metrics, + migration_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, MigrationServiceTransport +from .transports.grpc import MigrationServiceGrpcTransport +from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport + + +class MigrationServiceClientMeta(type): + """Metaclass for the MigrationService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MigrationServiceTransport]] + _transport_registry["grpc"] = MigrationServiceGrpcTransport + _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MigrationServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MigrationServiceClient(metaclass=MigrationServiceClientMeta): + """Service to handle EDW migrations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MigrationServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def migration_subtask_path( + project: str, + location: str, + workflow: str, + subtask: str, + ) -> str: + """Returns a fully-qualified migration_subtask string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format( + project=project, + location=location, + workflow=workflow, + subtask=subtask, + ) + + @staticmethod + def parse_migration_subtask_path(path: str) -> Dict[str, str]: + """Parses a migration_subtask path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)/subtasks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def migration_workflow_path( + project: str, + location: str, + workflow: str, + ) -> str: + """Returns a fully-qualified migration_workflow string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) + + @staticmethod + def parse_migration_workflow_path(path: str) -> Dict[str, str]: + """Parses a migration_workflow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def 
parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MigrationServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MigrationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MigrationServiceTransport): + # transport is a MigrationServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_migration_workflow( + self, + request: Optional[ + Union[migration_service.CreateMigrationWorkflowRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Creates a migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest, dict]): + The request object. Request to create a migration + workflow resource. + parent (str): + Required. The name of the project to which this + migration workflow belongs. Example: + ``projects/foo/locations/bar`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_workflow (google.cloud.bigquery_migration_v2.types.MigrationWorkflow): + Required. The migration workflow to + create. + + This corresponds to the ``migration_workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, migration_workflow]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.CreateMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.CreateMigrationWorkflowRequest): + request = migration_service.CreateMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_workflow is not None: + request.migration_workflow = migration_workflow + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_migration_workflow + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_migration_workflow( + self, + request: Optional[ + Union[migration_service.GetMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Gets a previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest, dict]): + The request object. A request to get a previously created + migration workflow. + name (str): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.GetMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.GetMigrationWorkflowRequest): + request = migration_service.GetMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_migration_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_migration_workflows( + self, + request: Optional[ + Union[migration_service.ListMigrationWorkflowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationWorkflowsPager: + r"""Lists previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest, dict]): + The request object. A request to list previously created + migration workflows. + parent (str): + Required. The project and location of the migration + workflows to list. Example: + ``projects/123/locations/us`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsPager: + Response object for a ListMigrationWorkflows call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.ListMigrationWorkflowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.ListMigrationWorkflowsRequest): + request = migration_service.ListMigrationWorkflowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_migration_workflows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMigrationWorkflowsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_migration_workflow( + self, + request: Optional[ + Union[migration_service.DeleteMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a migration workflow by name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest, dict]): + The request object. A request to delete a previously + created migration workflow. + name (str): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.DeleteMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest): + request = migration_service.DeleteMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_migration_workflow + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def start_migration_workflow( + self, + request: Optional[ + Union[migration_service.StartMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest, dict]): + The request object. A request to start a previously + created migration workflow. + name (str): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.StartMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, migration_service.StartMigrationWorkflowRequest): + request = migration_service.StartMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_migration_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_migration_subtask( + self, + request: Optional[ + Union[migration_service.GetMigrationSubtaskRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationSubtask: + r"""Gets a previously created migration subtask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest, dict]): + The request object. A request to get a previously created + migration subtasks. + name (str): + Required. The unique identifier for the migration + subtask. Example: + ``projects/123/locations/us/workflows/1234/subtasks/543`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.types.MigrationSubtask: + A subtask for a migration which + carries details about the configuration + of the subtask. The content of the + details should not matter to the end + user, but is a contract between the + subtask creator and subtask worker. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.GetMigrationSubtaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.GetMigrationSubtaskRequest): + request = migration_service.GetMigrationSubtaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_migration_subtask] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_migration_subtasks( + self, + request: Optional[ + Union[migration_service.ListMigrationSubtasksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationSubtasksPager: + r"""Lists previously created migration subtasks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2 + + def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest, dict]): + The request object. A request to list previously created + migration subtasks. + parent (str): + Required. The migration task of the subtasks to list. + Example: ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksPager: + Response object for a ListMigrationSubtasks call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.ListMigrationSubtasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.ListMigrationSubtasksRequest): + request = migration_service.ListMigrationSubtasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_migration_subtasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMigrationSubtasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MigrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MigrationServiceClient",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/pagers.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/pagers.py new file mode 100644 index 000000000000..f5e369fc8743 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/pagers.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_service, +) + + +class ListMigrationWorkflowsPager: + """A pager for iterating through ``list_migration_workflows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``migration_workflows`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListMigrationWorkflows`` requests and continue to iterate + through the ``migration_workflows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., migration_service.ListMigrationWorkflowsResponse], + request: migration_service.ListMigrationWorkflowsRequest, + response: migration_service.ListMigrationWorkflowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = migration_service.ListMigrationWorkflowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[migration_service.ListMigrationWorkflowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[migration_entities.MigrationWorkflow]: + for page in self.pages: + yield from page.migration_workflows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMigrationWorkflowsAsyncPager: + """A pager for iterating through ``list_migration_workflows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``migration_workflows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMigrationWorkflows`` requests and continue to iterate + through the ``migration_workflows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[migration_service.ListMigrationWorkflowsResponse] + ], + request: migration_service.ListMigrationWorkflowsRequest, + response: migration_service.ListMigrationWorkflowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = migration_service.ListMigrationWorkflowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[migration_service.ListMigrationWorkflowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[migration_entities.MigrationWorkflow]: + async def async_generator(): + async for page in self.pages: + for response in page.migration_workflows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMigrationSubtasksPager: + """A pager for iterating through ``list_migration_subtasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``migration_subtasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMigrationSubtasks`` requests and continue to iterate + through the ``migration_subtasks`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., migration_service.ListMigrationSubtasksResponse], + request: migration_service.ListMigrationSubtasksRequest, + response: migration_service.ListMigrationSubtasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = migration_service.ListMigrationSubtasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[migration_service.ListMigrationSubtasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[migration_entities.MigrationSubtask]: + for page in self.pages: + yield from page.migration_subtasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMigrationSubtasksAsyncPager: + """A pager for iterating through ``list_migration_subtasks`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``migration_subtasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMigrationSubtasks`` requests and continue to iterate + through the ``migration_subtasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[migration_service.ListMigrationSubtasksResponse] + ], + request: migration_service.ListMigrationSubtasksRequest, + response: migration_service.ListMigrationSubtasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = migration_service.ListMigrationSubtasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[migration_service.ListMigrationSubtasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[migration_entities.MigrationSubtask]: + async def async_generator(): + async for page in self.pages: + for response in page.migration_subtasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/__init__.py new file mode 100644 index 000000000000..b4ab5eab1913 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MigrationServiceTransport +from .grpc import MigrationServiceGrpcTransport +from .grpc_asyncio import MigrationServiceGrpcAsyncIOTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MigrationServiceTransport]] +_transport_registry["grpc"] = MigrationServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport + +__all__ = ( + "MigrationServiceTransport", + "MigrationServiceGrpcTransport", + "MigrationServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/base.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/base.py new file mode 100644 index 000000000000..6415b4442026 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/base.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_migration_v2 import gapic_version as package_version +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MigrationServiceTransport(abc.ABC): + """Abstract transport class for MigrationService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "bigquerymigration.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_migration_workflow: gapic_v1.method.wrap_method( + self.create_migration_workflow, + default_timeout=60.0, + client_info=client_info, + ), + self.get_migration_workflow: gapic_v1.method.wrap_method( + self.get_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_migration_workflows: gapic_v1.method.wrap_method( + self.list_migration_workflows, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_migration_workflow: gapic_v1.method.wrap_method( + self.delete_migration_workflow, + default_timeout=60.0, + client_info=client_info, + ), + self.start_migration_workflow: gapic_v1.method.wrap_method( + self.start_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_migration_subtask: gapic_v1.method.wrap_method( + self.get_migration_subtask, + default_timeout=None, + client_info=client_info, + ), + self.list_migration_subtasks: gapic_v1.method.wrap_method( + self.list_migration_subtasks, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_migration_workflow( + self, + ) -> Callable[ + [migration_service.CreateMigrationWorkflowRequest], + Union[ + migration_entities.MigrationWorkflow, + Awaitable[migration_entities.MigrationWorkflow], + ], + ]: + raise NotImplementedError() + + @property + def get_migration_workflow( + self, + ) -> Callable[ + [migration_service.GetMigrationWorkflowRequest], + Union[ + migration_entities.MigrationWorkflow, + Awaitable[migration_entities.MigrationWorkflow], + ], + ]: + raise NotImplementedError() + + @property + def list_migration_workflows( + self, + ) -> Callable[ + [migration_service.ListMigrationWorkflowsRequest], + Union[ + migration_service.ListMigrationWorkflowsResponse, + Awaitable[migration_service.ListMigrationWorkflowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_migration_workflow( + self, + ) -> Callable[ + [migration_service.DeleteMigrationWorkflowRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def start_migration_workflow( + self, + ) -> Callable[ + [migration_service.StartMigrationWorkflowRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_migration_subtask( + self, + ) -> Callable[ + [migration_service.GetMigrationSubtaskRequest], + Union[ + migration_entities.MigrationSubtask, + Awaitable[migration_entities.MigrationSubtask], + ], + ]: + raise NotImplementedError() + + @property + def list_migration_subtasks( + self, + ) -> Callable[ + [migration_service.ListMigrationSubtasksRequest], + Union[ + migration_service.ListMigrationSubtasksResponse, + Awaitable[migration_service.ListMigrationSubtasksResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MigrationServiceTransport",) diff --git 
a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc.py new file mode 100644 index 000000000000..7e665b35bb5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_service, +) + +from .base import DEFAULT_CLIENT_INFO, MigrationServiceTransport + + +class MigrationServiceGrpcTransport(MigrationServiceTransport): + """gRPC backend transport for MigrationService. + + Service to handle EDW migrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_migration_workflow( + self, + ) -> Callable[ + [migration_service.CreateMigrationWorkflowRequest], + migration_entities.MigrationWorkflow, + ]: + r"""Return a callable for the create migration workflow method over gRPC. + + Creates a migration workflow. + + Returns: + Callable[[~.CreateMigrationWorkflowRequest], + ~.MigrationWorkflow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_migration_workflow" not in self._stubs: + self._stubs["create_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/CreateMigrationWorkflow", + request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["create_migration_workflow"] + + @property + def get_migration_workflow( + self, + ) -> Callable[ + [migration_service.GetMigrationWorkflowRequest], + migration_entities.MigrationWorkflow, + ]: + r"""Return a callable for the get migration workflow method over gRPC. + + Gets a previously created migration workflow. + + Returns: + Callable[[~.GetMigrationWorkflowRequest], + ~.MigrationWorkflow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_migration_workflow" not in self._stubs: + self._stubs["get_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationWorkflow", + request_serializer=migration_service.GetMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["get_migration_workflow"] + + @property + def list_migration_workflows( + self, + ) -> Callable[ + [migration_service.ListMigrationWorkflowsRequest], + migration_service.ListMigrationWorkflowsResponse, + ]: + r"""Return a callable for the list migration workflows method over gRPC. + + Lists previously created migration workflow. + + Returns: + Callable[[~.ListMigrationWorkflowsRequest], + ~.ListMigrationWorkflowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_workflows" not in self._stubs: + self._stubs["list_migration_workflows"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationWorkflows", + request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, + response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, + ) + return self._stubs["list_migration_workflows"] + + @property + def delete_migration_workflow( + self, + ) -> Callable[[migration_service.DeleteMigrationWorkflowRequest], empty_pb2.Empty]: + r"""Return a callable for the delete migration workflow method over gRPC. + + Deletes a migration workflow by name. 
+ + Returns: + Callable[[~.DeleteMigrationWorkflowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_migration_workflow" not in self._stubs: + self._stubs["delete_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/DeleteMigrationWorkflow", + request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_migration_workflow"] + + @property + def start_migration_workflow( + self, + ) -> Callable[[migration_service.StartMigrationWorkflowRequest], empty_pb2.Empty]: + r"""Return a callable for the start migration workflow method over gRPC. + + Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + Returns: + Callable[[~.StartMigrationWorkflowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "start_migration_workflow" not in self._stubs: + self._stubs["start_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/StartMigrationWorkflow", + request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["start_migration_workflow"] + + @property + def get_migration_subtask( + self, + ) -> Callable[ + [migration_service.GetMigrationSubtaskRequest], + migration_entities.MigrationSubtask, + ]: + r"""Return a callable for the get migration subtask method over gRPC. + + Gets a previously created migration subtask. + + Returns: + Callable[[~.GetMigrationSubtaskRequest], + ~.MigrationSubtask]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_migration_subtask" not in self._stubs: + self._stubs["get_migration_subtask"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationSubtask", + request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, + response_deserializer=migration_entities.MigrationSubtask.deserialize, + ) + return self._stubs["get_migration_subtask"] + + @property + def list_migration_subtasks( + self, + ) -> Callable[ + [migration_service.ListMigrationSubtasksRequest], + migration_service.ListMigrationSubtasksResponse, + ]: + r"""Return a callable for the list migration subtasks method over gRPC. + + Lists previously created migration subtasks. + + Returns: + Callable[[~.ListMigrationSubtasksRequest], + ~.ListMigrationSubtasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_subtasks" not in self._stubs: + self._stubs["list_migration_subtasks"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationSubtasks", + request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, + response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, + ) + return self._stubs["list_migration_subtasks"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MigrationServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c474e3f5943a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc_asyncio.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_service, +) + +from .base import DEFAULT_CLIENT_INFO, MigrationServiceTransport +from .grpc import MigrationServiceGrpcTransport + + +class MigrationServiceGrpcAsyncIOTransport(MigrationServiceTransport): + """gRPC AsyncIO backend transport for MigrationService. + + Service to handle EDW migrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_migration_workflow( + self, + ) -> Callable[ + [migration_service.CreateMigrationWorkflowRequest], + Awaitable[migration_entities.MigrationWorkflow], + ]: + r"""Return a callable for the create migration workflow method over gRPC. + + Creates a migration workflow. 
+ + Returns: + Callable[[~.CreateMigrationWorkflowRequest], + Awaitable[~.MigrationWorkflow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_migration_workflow" not in self._stubs: + self._stubs["create_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/CreateMigrationWorkflow", + request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["create_migration_workflow"] + + @property + def get_migration_workflow( + self, + ) -> Callable[ + [migration_service.GetMigrationWorkflowRequest], + Awaitable[migration_entities.MigrationWorkflow], + ]: + r"""Return a callable for the get migration workflow method over gRPC. + + Gets a previously created migration workflow. + + Returns: + Callable[[~.GetMigrationWorkflowRequest], + Awaitable[~.MigrationWorkflow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_migration_workflow" not in self._stubs: + self._stubs["get_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationWorkflow", + request_serializer=migration_service.GetMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["get_migration_workflow"] + + @property + def list_migration_workflows( + self, + ) -> Callable[ + [migration_service.ListMigrationWorkflowsRequest], + Awaitable[migration_service.ListMigrationWorkflowsResponse], + ]: + r"""Return a callable for the list migration workflows method over gRPC. + + Lists previously created migration workflow. + + Returns: + Callable[[~.ListMigrationWorkflowsRequest], + Awaitable[~.ListMigrationWorkflowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_workflows" not in self._stubs: + self._stubs["list_migration_workflows"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationWorkflows", + request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, + response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, + ) + return self._stubs["list_migration_workflows"] + + @property + def delete_migration_workflow( + self, + ) -> Callable[ + [migration_service.DeleteMigrationWorkflowRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete migration workflow method over gRPC. + + Deletes a migration workflow by name. + + Returns: + Callable[[~.DeleteMigrationWorkflowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_migration_workflow" not in self._stubs: + self._stubs["delete_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/DeleteMigrationWorkflow", + request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_migration_workflow"] + + @property + def start_migration_workflow( + self, + ) -> Callable[ + [migration_service.StartMigrationWorkflowRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the start migration workflow method over gRPC. + + Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + Returns: + Callable[[~.StartMigrationWorkflowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "start_migration_workflow" not in self._stubs: + self._stubs["start_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/StartMigrationWorkflow", + request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["start_migration_workflow"] + + @property + def get_migration_subtask( + self, + ) -> Callable[ + [migration_service.GetMigrationSubtaskRequest], + Awaitable[migration_entities.MigrationSubtask], + ]: + r"""Return a callable for the get migration subtask method over gRPC. + + Gets a previously created migration subtask. + + Returns: + Callable[[~.GetMigrationSubtaskRequest], + Awaitable[~.MigrationSubtask]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_migration_subtask" not in self._stubs: + self._stubs["get_migration_subtask"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationSubtask", + request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, + response_deserializer=migration_entities.MigrationSubtask.deserialize, + ) + return self._stubs["get_migration_subtask"] + + @property + def list_migration_subtasks( + self, + ) -> Callable[ + [migration_service.ListMigrationSubtasksRequest], + Awaitable[migration_service.ListMigrationSubtasksResponse], + ]: + r"""Return a callable for the list migration subtasks method over gRPC. + + Lists previously created migration subtasks. + + Returns: + Callable[[~.ListMigrationSubtasksRequest], + Awaitable[~.ListMigrationSubtasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_subtasks" not in self._stubs: + self._stubs["list_migration_subtasks"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationSubtasks", + request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, + response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, + ) + return self._stubs["list_migration_subtasks"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MigrationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/__init__.py new file mode 100644 index 000000000000..9af06a73498d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/__init__.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .migration_entities import MigrationSubtask, MigrationTask, MigrationWorkflow +from .migration_error_details import ErrorDetail, ErrorLocation, ResourceErrorDetail +from .migration_metrics import Point, TimeInterval, TimeSeries, TypedValue +from .migration_service import ( + CreateMigrationWorkflowRequest, + DeleteMigrationWorkflowRequest, + GetMigrationSubtaskRequest, + GetMigrationWorkflowRequest, + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + StartMigrationWorkflowRequest, +) +from .translation_config import ( + AzureSynapseDialect, + BigQueryDialect, + Dialect, + HiveQLDialect, + MySQLDialect, + NameMappingKey, + NameMappingValue, + NetezzaDialect, + ObjectNameMapping, + ObjectNameMappingList, + OracleDialect, + PostgresqlDialect, + PrestoDialect, + RedshiftDialect, + SnowflakeDialect, + SourceEnv, + SparkSQLDialect, + SQLServerDialect, + TeradataDialect, + TranslationConfigDetails, + VerticaDialect, +) + +__all__ = ( + "MigrationSubtask", + "MigrationTask", + "MigrationWorkflow", + "ErrorDetail", + "ErrorLocation", + "ResourceErrorDetail", + "Point", + "TimeInterval", + "TimeSeries", + "TypedValue", + "CreateMigrationWorkflowRequest", + "DeleteMigrationWorkflowRequest", + "GetMigrationSubtaskRequest", + "GetMigrationWorkflowRequest", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + "ListMigrationWorkflowsRequest", + "ListMigrationWorkflowsResponse", + "StartMigrationWorkflowRequest", + "AzureSynapseDialect", + "BigQueryDialect", + "Dialect", + "HiveQLDialect", + "MySQLDialect", + "NameMappingKey", + "NameMappingValue", + "NetezzaDialect", + "ObjectNameMapping", + "ObjectNameMappingList", + "OracleDialect", + "PostgresqlDialect", + "PrestoDialect", + "RedshiftDialect", + "SnowflakeDialect", + "SourceEnv", + "SparkSQLDialect", + "SQLServerDialect", + "TeradataDialect", + "TranslationConfigDetails", + "VerticaDialect", +) diff --git 
a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_entities.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_entities.py new file mode 100644 index 000000000000..1482989e648a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_entities.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.bigquery_migration_v2.types import ( + migration_error_details, + migration_metrics, + translation_config, +) + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2", + manifest={ + "MigrationWorkflow", + "MigrationTask", + "MigrationSubtask", + }, +) + + +class MigrationWorkflow(proto.Message): + r"""A migration workflow which specifies what needs to be done + for an EDW migration. + + Attributes: + name (str): + Output only. Immutable. The unique identifier for the + migration workflow. The ID is server-generated. + + Example: ``projects/123/locations/us/workflows/345`` + display_name (str): + The display name of the workflow. This can be + set to give a workflow a descriptive name. 
There + is no guarantee or enforcement of uniqueness. + tasks (MutableMapping[str, google.cloud.bigquery_migration_v2.types.MigrationTask]): + The tasks in a workflow in a named map. The + name (i.e. key) has no meaning and is merely a + convenient way to address a specific task in a + workflow. + state (google.cloud.bigquery_migration_v2.types.MigrationWorkflow.State): + Output only. That status of the workflow. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the workflow was created. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the workflow was last updated. + """ + + class State(proto.Enum): + r"""Possible migration workflow states. + + Values: + STATE_UNSPECIFIED (0): + Workflow state is unspecified. + DRAFT (1): + Workflow is in draft status, i.e. tasks are + not yet eligible for execution. + RUNNING (2): + Workflow is running (i.e. tasks are eligible + for execution). + PAUSED (3): + Workflow is paused. Tasks currently in + progress may continue, but no further tasks will + be scheduled. + COMPLETED (4): + Workflow is complete. There should not be any + task in a non-terminal state, but if they are + (e.g. forced termination), they will not be + scheduled. 
+ """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + RUNNING = 2 + PAUSED = 3 + COMPLETED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + tasks: MutableMapping[str, "MigrationTask"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="MigrationTask", + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class MigrationTask(proto.Message): + r"""A single task for a migration which has details about the + configuration of the task. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + translation_config_details (google.cloud.bigquery_migration_v2.types.TranslationConfigDetails): + Task configuration for Batch SQL Translation. + + This field is a member of `oneof`_ ``task_details``. + id (str): + Output only. Immutable. The unique identifier + for the migration task. The ID is + server-generated. + type_ (str): + The type of the task. This must be one of the supported task + types: Translation_Teradata2BQ, Translation_Redshift2BQ, + Translation_Bteq2BQ, Translation_Oracle2BQ, + Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + Translation_Snowflake2BQ, Translation_Netezza2BQ, + Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + Translation_SQLServer2BQ, Translation_Presto2BQ, + Translation_MySQL2BQ, Translation_Postgresql2BQ. + state (google.cloud.bigquery_migration_v2.types.MigrationTask.State): + Output only. The current state of the task. + processing_error (google.rpc.error_details_pb2.ErrorInfo): + Output only. An explanation that may be + populated when the task is in FAILED state. 
+ create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the task was created. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the task was last updated. + """ + + class State(proto.Enum): + r"""Possible states of a migration task. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. + PENDING (1): + The task is waiting for orchestration. + ORCHESTRATING (2): + The task is assigned to an orchestrator. + RUNNING (3): + The task is running, i.e. its subtasks are + ready for execution. + PAUSED (4): + Tha task is paused. Assigned subtasks can + continue, but no new subtasks will be scheduled. + SUCCEEDED (5): + The task finished successfully. + FAILED (6): + The task finished unsuccessfully. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + ORCHESTRATING = 2 + RUNNING = 3 + PAUSED = 4 + SUCCEEDED = 5 + FAILED = 6 + + translation_config_details: translation_config.TranslationConfigDetails = ( + proto.Field( + proto.MESSAGE, + number=14, + oneof="task_details", + message=translation_config.TranslationConfigDetails, + ) + ) + id: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + processing_error: error_details_pb2.ErrorInfo = proto.Field( + proto.MESSAGE, + number=5, + message=error_details_pb2.ErrorInfo, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + + +class MigrationSubtask(proto.Message): + r"""A subtask for a migration which carries details about the + configuration of the subtask. The content of the details should + not matter to the end user, but is a contract between the + subtask creator and subtask worker. + + Attributes: + name (str): + Output only. Immutable. 
The resource name for the migration + subtask. The ID is server-generated. + + Example: + ``projects/123/locations/us/workflows/345/subtasks/678`` + task_id (str): + The unique ID of the task to which this + subtask belongs. + type_ (str): + The type of the Subtask. The migration + service does not check whether this is a known + type. It is up to the task creator (i.e. + orchestrator or worker) to ensure it only + creates subtasks for which there are compatible + workers polling for Subtasks. + state (google.cloud.bigquery_migration_v2.types.MigrationSubtask.State): + Output only. The current state of the + subtask. + processing_error (google.rpc.error_details_pb2.ErrorInfo): + Output only. An explanation that may be + populated when the task is in FAILED state. + resource_error_details (MutableSequence[google.cloud.bigquery_migration_v2.types.ResourceErrorDetail]): + Output only. Provides details to errors and + issues encountered while processing the subtask. + Presence of error details does not mean that the + subtask failed. + resource_error_count (int): + The number or resources with errors. Note: This is not the + total number of errors as each resource can have more than + one error. This is used to indicate truncation by having a + ``resource_error_count`` that is higher than the size of + ``resource_error_details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the subtask was created. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the subtask was last updated. + metrics (MutableSequence[google.cloud.bigquery_migration_v2.types.TimeSeries]): + The metrics for the subtask. + """ + + class State(proto.Enum): + r"""Possible states of a migration subtask. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. + ACTIVE (1): + The subtask is ready, i.e. it is ready for + execution. + RUNNING (2): + The subtask is running, i.e. it is assigned + to a worker for execution. 
+ SUCCEEDED (3): + The subtask finished successfully. + FAILED (4): + The subtask finished unsuccessfully. + PAUSED (5): + The subtask is paused, i.e., it will not be + scheduled. If it was already assigned,it might + still finish but no new lease renewals will be + granted. + PENDING_DEPENDENCY (6): + The subtask is pending a dependency. It will + be scheduled once its dependencies are done. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + RUNNING = 2 + SUCCEEDED = 3 + FAILED = 4 + PAUSED = 5 + PENDING_DEPENDENCY = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + task_id: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + processing_error: error_details_pb2.ErrorInfo = proto.Field( + proto.MESSAGE, + number=6, + message=error_details_pb2.ErrorInfo, + ) + resource_error_details: MutableSequence[ + migration_error_details.ResourceErrorDetail + ] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=migration_error_details.ResourceErrorDetail, + ) + resource_error_count: int = proto.Field( + proto.INT32, + number=13, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + metrics: MutableSequence[migration_metrics.TimeSeries] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=migration_metrics.TimeSeries, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_error_details.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_error_details.py new file mode 100644 index 000000000000..98fc92b7f8ec --- /dev/null +++ 
b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_error_details.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.rpc import error_details_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2", + manifest={ + "ResourceErrorDetail", + "ErrorDetail", + "ErrorLocation", + }, +) + + +class ResourceErrorDetail(proto.Message): + r"""Provides details for errors and the corresponding resources. + + Attributes: + resource_info (google.rpc.error_details_pb2.ResourceInfo): + Required. Information about the resource + where the error is located. + error_details (MutableSequence[google.cloud.bigquery_migration_v2.types.ErrorDetail]): + Required. The error details for the resource. + error_count (int): + Required. How many errors there are in total for the + resource. Truncation can be indicated by having an + ``error_count`` that is higher than the size of + ``error_details``. 
+ """ + + resource_info: error_details_pb2.ResourceInfo = proto.Field( + proto.MESSAGE, + number=1, + message=error_details_pb2.ResourceInfo, + ) + error_details: MutableSequence["ErrorDetail"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ErrorDetail", + ) + error_count: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ErrorDetail(proto.Message): + r"""Provides details for errors, e.g. issues that where + encountered when processing a subtask. + + Attributes: + location (google.cloud.bigquery_migration_v2.types.ErrorLocation): + Optional. The exact location within the + resource (if applicable). + error_info (google.rpc.error_details_pb2.ErrorInfo): + Required. Describes the cause of the error + with structured detail. + """ + + location: "ErrorLocation" = proto.Field( + proto.MESSAGE, + number=1, + message="ErrorLocation", + ) + error_info: error_details_pb2.ErrorInfo = proto.Field( + proto.MESSAGE, + number=2, + message=error_details_pb2.ErrorInfo, + ) + + +class ErrorLocation(proto.Message): + r"""Holds information about where the error is located. + + Attributes: + line (int): + Optional. If applicable, denotes the line + where the error occurred. A zero value means + that there is no line information. + column (int): + Optional. If applicable, denotes the column + where the error occurred. A zero value means + that there is no columns information. 
+ """ + + line: int = proto.Field( + proto.INT32, + number=1, + ) + column: int = proto.Field( + proto.INT32, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_metrics.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_metrics.py new file mode 100644 index 000000000000..6327c6d55a00 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_metrics.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2", + manifest={ + "TimeSeries", + "Point", + "TimeInterval", + "TypedValue", + }, +) + + +class TimeSeries(proto.Message): + r"""The metrics object for a SubTask. + + Attributes: + metric (str): + Required. The name of the metric. + + If the metric is not known by the service yet, + it will be auto-created. + value_type (google.api.metric_pb2.ValueType): + Required. The value type of the time series. 
+ metric_kind (google.api.metric_pb2.MetricKind): + Optional. The metric kind of the time series. + + If present, it must be the same as the metric kind of the + associated metric. If the associated metric's descriptor + must be auto-created, then this field specifies the metric + kind of the new descriptor and must be either ``GAUGE`` (the + default) or ``CUMULATIVE``. + points (MutableSequence[google.cloud.bigquery_migration_v2.types.Point]): + Required. The data points of this time series. When listing + time series, points are returned in reverse time order. + + When creating a time series, this field must contain exactly + one point and the point's type must be the same as the value + type of the associated metric. If the associated metric's + descriptor must be auto-created, then the value type of the + descriptor is determined by the point's type, which must be + ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``. + """ + + metric: str = proto.Field( + proto.STRING, + number=1, + ) + value_type: metric_pb2.MetricDescriptor.ValueType = proto.Field( + proto.ENUM, + number=2, + enum=metric_pb2.MetricDescriptor.ValueType, + ) + metric_kind: metric_pb2.MetricDescriptor.MetricKind = proto.Field( + proto.ENUM, + number=3, + enum=metric_pb2.MetricDescriptor.MetricKind, + ) + points: MutableSequence["Point"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Point", + ) + + +class Point(proto.Message): + r"""A single data point in a time series. + + Attributes: + interval (google.cloud.bigquery_migration_v2.types.TimeInterval): + The time interval to which the data point applies. For + ``GAUGE`` metrics, the start time does not need to be + supplied, but if it is supplied, it must equal the end time. + For ``DELTA`` metrics, the start and end time should specify + a non-zero interval, with subsequent points specifying + contiguous and non-overlapping intervals. 
For ``CUMULATIVE`` + metrics, the start and end time should specify a non-zero + interval, with subsequent points specifying the same start + time and increasing end times, until an event resets the + cumulative value to zero and sets a new start time for the + following points. + value (google.cloud.bigquery_migration_v2.types.TypedValue): + The value of the data point. + """ + + interval: "TimeInterval" = proto.Field( + proto.MESSAGE, + number=1, + message="TimeInterval", + ) + value: "TypedValue" = proto.Field( + proto.MESSAGE, + number=2, + message="TypedValue", + ) + + +class TimeInterval(proto.Message): + r"""A time interval extending just after a start time through an + end time. If the start time is the same as the end time, then + the interval represents a single point in time. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The beginning of the time interval. + The default value for the start time is the end + time. The start time must not be later than the + end time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The end of the time interval. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class TypedValue(proto.Message): + r"""A single strongly-typed value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bool_value (bool): + A Boolean value: ``true`` or ``false``. + + This field is a member of `oneof`_ ``value``. + int64_value (int): + A 64-bit integer. Its range is approximately + ``+/-9.2x10^18``. 
+ + This field is a member of `oneof`_ ``value``. + double_value (float): + A 64-bit double-precision floating-point number. Its + magnitude is approximately ``+/-10^(+/-300)`` and it has 16 + significant digits of precision. + + This field is a member of `oneof`_ ``value``. + string_value (str): + A variable-length string value. + + This field is a member of `oneof`_ ``value``. + distribution_value (google.api.distribution_pb2.Distribution): + A distribution value. + + This field is a member of `oneof`_ ``value``. + """ + + bool_value: bool = proto.Field( + proto.BOOL, + number=1, + oneof="value", + ) + int64_value: int = proto.Field( + proto.INT64, + number=2, + oneof="value", + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=4, + oneof="value", + ) + distribution_value: distribution_pb2.Distribution = proto.Field( + proto.MESSAGE, + number=5, + oneof="value", + message=distribution_pb2.Distribution, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_service.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_service.py new file mode 100644 index 000000000000..356b0ef99fcd --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/migration_service.py @@ -0,0 +1,291 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.bigquery_migration_v2.types import migration_entities + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2", + manifest={ + "CreateMigrationWorkflowRequest", + "GetMigrationWorkflowRequest", + "ListMigrationWorkflowsRequest", + "ListMigrationWorkflowsResponse", + "DeleteMigrationWorkflowRequest", + "StartMigrationWorkflowRequest", + "GetMigrationSubtaskRequest", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + }, +) + + +class CreateMigrationWorkflowRequest(proto.Message): + r"""Request to create a migration workflow resource. + + Attributes: + parent (str): + Required. The name of the project to which this migration + workflow belongs. Example: ``projects/foo/locations/bar`` + migration_workflow (google.cloud.bigquery_migration_v2.types.MigrationWorkflow): + Required. The migration workflow to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + migration_workflow: migration_entities.MigrationWorkflow = proto.Field( + proto.MESSAGE, + number=2, + message=migration_entities.MigrationWorkflow, + ) + + +class GetMigrationWorkflowRequest(proto.Message): + r"""A request to get a previously created migration workflow. + + Attributes: + name (str): + Required. The unique identifier for the migration workflow. + Example: ``projects/123/locations/us/workflows/1234`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be retrieved. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListMigrationWorkflowsRequest(proto.Message): + r"""A request to list previously created migration workflows. + + Attributes: + parent (str): + Required. The project and location of the migration + workflows to list. Example: ``projects/123/locations/us`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be retrieved. + page_size (int): + The maximum number of migration workflows to + return. The service may return fewer than this + number. + page_token (str): + A page token, received from previous + ``ListMigrationWorkflows`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListMigrationWorkflows`` must match the call that provided + the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListMigrationWorkflowsResponse(proto.Message): + r"""Response object for a ``ListMigrationWorkflows`` call. + + Attributes: + migration_workflows (MutableSequence[google.cloud.bigquery_migration_v2.types.MigrationWorkflow]): + The migration workflows for the specified + project / location. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + migration_workflows: MutableSequence[ + migration_entities.MigrationWorkflow + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=migration_entities.MigrationWorkflow, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteMigrationWorkflowRequest(proto.Message): + r"""A request to delete a previously created migration workflow. + + Attributes: + name (str): + Required. The unique identifier for the migration workflow. + Example: ``projects/123/locations/us/workflows/1234`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StartMigrationWorkflowRequest(proto.Message): + r"""A request to start a previously created migration workflow. + + Attributes: + name (str): + Required. The unique identifier for the migration workflow. + Example: ``projects/123/locations/us/workflows/1234`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetMigrationSubtaskRequest(proto.Message): + r"""A request to get a previously created migration subtasks. + + Attributes: + name (str): + Required. The unique identifier for the migration subtask. + Example: + ``projects/123/locations/us/workflows/1234/subtasks/543`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be retrieved. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListMigrationSubtasksRequest(proto.Message): + r"""A request to list previously created migration subtasks. + + Attributes: + parent (str): + Required. The migration task of the subtasks to list. + Example: ``projects/123/locations/us/workflows/1234`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be retrieved. + page_size (int): + Optional. 
The maximum number of migration + tasks to return. The service may return fewer + than this number. + page_token (str): + Optional. A page token, received from previous + ``ListMigrationSubtasks`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListMigrationSubtasks`` must match the call that provided + the page token. + filter (str): + Optional. The filter to apply. This can be used to get the + subtasks of a specific tasks in a workflow, e.g. + ``migration_task = "ab012"`` where ``"ab012"`` is the task + ID (not the name in the named map). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMigrationSubtasksResponse(proto.Message): + r"""Response object for a ``ListMigrationSubtasks`` call. + + Attributes: + migration_subtasks (MutableSequence[google.cloud.bigquery_migration_v2.types.MigrationSubtask]): + The migration subtasks for the specified + task. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + migration_subtasks: MutableSequence[ + migration_entities.MigrationSubtask + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=migration_entities.MigrationSubtask, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/translation_config.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/translation_config.py new file mode 100644 index 000000000000..7488e3b9d8e6 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/types/translation_config.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2", + manifest={ + "TranslationConfigDetails", + "Dialect", + "BigQueryDialect", + "HiveQLDialect", + "RedshiftDialect", + "TeradataDialect", + "OracleDialect", + "SparkSQLDialect", + "SnowflakeDialect", + "NetezzaDialect", + "AzureSynapseDialect", + "VerticaDialect", + "SQLServerDialect", + "PostgresqlDialect", + "PrestoDialect", + "MySQLDialect", + "ObjectNameMappingList", + "ObjectNameMapping", + "NameMappingKey", + "NameMappingValue", + "SourceEnv", + }, +) + + +class TranslationConfigDetails(proto.Message): + r"""The translation config to capture necessary settings for a + translation task and subtask. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_source_path (str): + The Cloud Storage path for a directory of + files to translate in a task. + + This field is a member of `oneof`_ ``source_location``. + gcs_target_path (str): + The Cloud Storage path to write back the + corresponding input files to. + + This field is a member of `oneof`_ ``target_location``. + name_mapping_list (google.cloud.bigquery_migration_v2.types.ObjectNameMappingList): + The mapping of objects to their desired + output names in list form. + + This field is a member of `oneof`_ ``output_name_mapping``. + source_dialect (google.cloud.bigquery_migration_v2.types.Dialect): + The dialect of the input files. + target_dialect (google.cloud.bigquery_migration_v2.types.Dialect): + The target dialect for the engine to + translate the input to. + source_env (google.cloud.bigquery_migration_v2.types.SourceEnv): + The default source environment values for the + translation. + request_source (str): + The indicator to show translation request + initiator. 
+ """ + + gcs_source_path: str = proto.Field( + proto.STRING, + number=1, + oneof="source_location", + ) + gcs_target_path: str = proto.Field( + proto.STRING, + number=2, + oneof="target_location", + ) + name_mapping_list: "ObjectNameMappingList" = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_name_mapping", + message="ObjectNameMappingList", + ) + source_dialect: "Dialect" = proto.Field( + proto.MESSAGE, + number=3, + message="Dialect", + ) + target_dialect: "Dialect" = proto.Field( + proto.MESSAGE, + number=4, + message="Dialect", + ) + source_env: "SourceEnv" = proto.Field( + proto.MESSAGE, + number=6, + message="SourceEnv", + ) + request_source: str = proto.Field( + proto.STRING, + number=8, + ) + + +class Dialect(proto.Message): + r"""The possible dialect options for translation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bigquery_dialect (google.cloud.bigquery_migration_v2.types.BigQueryDialect): + The BigQuery dialect + + This field is a member of `oneof`_ ``dialect_value``. + hiveql_dialect (google.cloud.bigquery_migration_v2.types.HiveQLDialect): + The HiveQL dialect + + This field is a member of `oneof`_ ``dialect_value``. + redshift_dialect (google.cloud.bigquery_migration_v2.types.RedshiftDialect): + The Redshift dialect + + This field is a member of `oneof`_ ``dialect_value``. + teradata_dialect (google.cloud.bigquery_migration_v2.types.TeradataDialect): + The Teradata dialect + + This field is a member of `oneof`_ ``dialect_value``. + oracle_dialect (google.cloud.bigquery_migration_v2.types.OracleDialect): + The Oracle dialect + + This field is a member of `oneof`_ ``dialect_value``. 
+ sparksql_dialect (google.cloud.bigquery_migration_v2.types.SparkSQLDialect): + The SparkSQL dialect + + This field is a member of `oneof`_ ``dialect_value``. + snowflake_dialect (google.cloud.bigquery_migration_v2.types.SnowflakeDialect): + The Snowflake dialect + + This field is a member of `oneof`_ ``dialect_value``. + netezza_dialect (google.cloud.bigquery_migration_v2.types.NetezzaDialect): + The Netezza dialect + + This field is a member of `oneof`_ ``dialect_value``. + azure_synapse_dialect (google.cloud.bigquery_migration_v2.types.AzureSynapseDialect): + The Azure Synapse dialect + + This field is a member of `oneof`_ ``dialect_value``. + vertica_dialect (google.cloud.bigquery_migration_v2.types.VerticaDialect): + The Vertica dialect + + This field is a member of `oneof`_ ``dialect_value``. + sql_server_dialect (google.cloud.bigquery_migration_v2.types.SQLServerDialect): + The SQL Server dialect + + This field is a member of `oneof`_ ``dialect_value``. + postgresql_dialect (google.cloud.bigquery_migration_v2.types.PostgresqlDialect): + The Postgresql dialect + + This field is a member of `oneof`_ ``dialect_value``. + presto_dialect (google.cloud.bigquery_migration_v2.types.PrestoDialect): + The Presto dialect + + This field is a member of `oneof`_ ``dialect_value``. + mysql_dialect (google.cloud.bigquery_migration_v2.types.MySQLDialect): + The MySQL dialect + + This field is a member of `oneof`_ ``dialect_value``. 
+ """ + + bigquery_dialect: "BigQueryDialect" = proto.Field( + proto.MESSAGE, + number=1, + oneof="dialect_value", + message="BigQueryDialect", + ) + hiveql_dialect: "HiveQLDialect" = proto.Field( + proto.MESSAGE, + number=2, + oneof="dialect_value", + message="HiveQLDialect", + ) + redshift_dialect: "RedshiftDialect" = proto.Field( + proto.MESSAGE, + number=3, + oneof="dialect_value", + message="RedshiftDialect", + ) + teradata_dialect: "TeradataDialect" = proto.Field( + proto.MESSAGE, + number=4, + oneof="dialect_value", + message="TeradataDialect", + ) + oracle_dialect: "OracleDialect" = proto.Field( + proto.MESSAGE, + number=5, + oneof="dialect_value", + message="OracleDialect", + ) + sparksql_dialect: "SparkSQLDialect" = proto.Field( + proto.MESSAGE, + number=6, + oneof="dialect_value", + message="SparkSQLDialect", + ) + snowflake_dialect: "SnowflakeDialect" = proto.Field( + proto.MESSAGE, + number=7, + oneof="dialect_value", + message="SnowflakeDialect", + ) + netezza_dialect: "NetezzaDialect" = proto.Field( + proto.MESSAGE, + number=8, + oneof="dialect_value", + message="NetezzaDialect", + ) + azure_synapse_dialect: "AzureSynapseDialect" = proto.Field( + proto.MESSAGE, + number=9, + oneof="dialect_value", + message="AzureSynapseDialect", + ) + vertica_dialect: "VerticaDialect" = proto.Field( + proto.MESSAGE, + number=10, + oneof="dialect_value", + message="VerticaDialect", + ) + sql_server_dialect: "SQLServerDialect" = proto.Field( + proto.MESSAGE, + number=11, + oneof="dialect_value", + message="SQLServerDialect", + ) + postgresql_dialect: "PostgresqlDialect" = proto.Field( + proto.MESSAGE, + number=12, + oneof="dialect_value", + message="PostgresqlDialect", + ) + presto_dialect: "PrestoDialect" = proto.Field( + proto.MESSAGE, + number=13, + oneof="dialect_value", + message="PrestoDialect", + ) + mysql_dialect: "MySQLDialect" = proto.Field( + proto.MESSAGE, + number=14, + oneof="dialect_value", + message="MySQLDialect", + ) + + +class 
BigQueryDialect(proto.Message): + r"""The dialect definition for BigQuery.""" + + +class HiveQLDialect(proto.Message): + r"""The dialect definition for HiveQL.""" + + +class RedshiftDialect(proto.Message): + r"""The dialect definition for Redshift.""" + + +class TeradataDialect(proto.Message): + r"""The dialect definition for Teradata. + + Attributes: + mode (google.cloud.bigquery_migration_v2.types.TeradataDialect.Mode): + Which Teradata sub-dialect mode the user + specifies. + """ + + class Mode(proto.Enum): + r"""The sub-dialect options for Teradata. + + Values: + MODE_UNSPECIFIED (0): + Unspecified mode. + SQL (1): + Teradata SQL mode. + BTEQ (2): + BTEQ mode (which includes SQL). + """ + MODE_UNSPECIFIED = 0 + SQL = 1 + BTEQ = 2 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + + +class OracleDialect(proto.Message): + r"""The dialect definition for Oracle.""" + + +class SparkSQLDialect(proto.Message): + r"""The dialect definition for SparkSQL.""" + + +class SnowflakeDialect(proto.Message): + r"""The dialect definition for Snowflake.""" + + +class NetezzaDialect(proto.Message): + r"""The dialect definition for Netezza.""" + + +class AzureSynapseDialect(proto.Message): + r"""The dialect definition for Azure Synapse.""" + + +class VerticaDialect(proto.Message): + r"""The dialect definition for Vertica.""" + + +class SQLServerDialect(proto.Message): + r"""The dialect definition for SQL Server.""" + + +class PostgresqlDialect(proto.Message): + r"""The dialect definition for Postgresql.""" + + +class PrestoDialect(proto.Message): + r"""The dialect definition for Presto.""" + + +class MySQLDialect(proto.Message): + r"""The dialect definition for MySQL.""" + + +class ObjectNameMappingList(proto.Message): + r"""Represents a map of name mappings using a list of key:value + proto messages of existing name to desired output name. 
+ + Attributes: + name_map (MutableSequence[google.cloud.bigquery_migration_v2.types.ObjectNameMapping]): + The elements of the object name map. + """ + + name_map: MutableSequence["ObjectNameMapping"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ObjectNameMapping", + ) + + +class ObjectNameMapping(proto.Message): + r"""Represents a key-value pair of NameMappingKey to + NameMappingValue to represent the mapping of SQL names from the + input value to desired output. + + Attributes: + source (google.cloud.bigquery_migration_v2.types.NameMappingKey): + The name of the object in source that is + being mapped. + target (google.cloud.bigquery_migration_v2.types.NameMappingValue): + The desired target name of the object that is + being mapped. + """ + + source: "NameMappingKey" = proto.Field( + proto.MESSAGE, + number=1, + message="NameMappingKey", + ) + target: "NameMappingValue" = proto.Field( + proto.MESSAGE, + number=2, + message="NameMappingValue", + ) + + +class NameMappingKey(proto.Message): + r"""The potential components of a full name mapping that will be + mapped during translation in the source data warehouse. + + Attributes: + type_ (google.cloud.bigquery_migration_v2.types.NameMappingKey.Type): + The type of object that is being mapped. + database (str): + The database name (BigQuery project ID + equivalent in the source data warehouse). + schema (str): + The schema name (BigQuery dataset equivalent + in the source data warehouse). + relation (str): + The relation name (BigQuery table or view + equivalent in the source data warehouse). + attribute (str): + The attribute name (BigQuery column + equivalent in the source data warehouse). + """ + + class Type(proto.Enum): + r"""The type of the object that is being mapped. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified name mapping type. + DATABASE (1): + The object being mapped is a database. + SCHEMA (2): + The object being mapped is a schema. 
+ RELATION (3): + The object being mapped is a relation. + ATTRIBUTE (4): + The object being mapped is an attribute. + RELATION_ALIAS (5): + The object being mapped is a relation alias. + ATTRIBUTE_ALIAS (6): + The object being mapped is a an attribute + alias. + FUNCTION (7): + The object being mapped is a function. + """ + TYPE_UNSPECIFIED = 0 + DATABASE = 1 + SCHEMA = 2 + RELATION = 3 + ATTRIBUTE = 4 + RELATION_ALIAS = 5 + ATTRIBUTE_ALIAS = 6 + FUNCTION = 7 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + schema: str = proto.Field( + proto.STRING, + number=3, + ) + relation: str = proto.Field( + proto.STRING, + number=4, + ) + attribute: str = proto.Field( + proto.STRING, + number=5, + ) + + +class NameMappingValue(proto.Message): + r"""The potential components of a full name mapping that will be + mapped during translation in the target data warehouse. + + Attributes: + database (str): + The database name (BigQuery project ID + equivalent in the target data warehouse). + schema (str): + The schema name (BigQuery dataset equivalent + in the target data warehouse). + relation (str): + The relation name (BigQuery table or view + equivalent in the target data warehouse). + attribute (str): + The attribute name (BigQuery column + equivalent in the target data warehouse). + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + schema: str = proto.Field( + proto.STRING, + number=2, + ) + relation: str = proto.Field( + proto.STRING, + number=3, + ) + attribute: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SourceEnv(proto.Message): + r"""Represents the default source environment values for the + translation. + + Attributes: + default_database (str): + The default database name to fully qualify + SQL objects when their database name is missing. + schema_search_path (MutableSequence[str]): + The schema search path. 
When SQL objects are + missing schema name, translation engine will + search through this list to find the value. + """ + + default_database: str = proto.Field( + proto.STRING, + number=1, + ) + schema_search_path: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/__init__.py new file mode 100644 index 000000000000..f4fcb594e7cb --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/__init__.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.migration_service import ( + MigrationServiceAsyncClient, + MigrationServiceClient, +) +from .services.sql_translation_service import ( + SqlTranslationServiceAsyncClient, + SqlTranslationServiceClient, +) +from .types.assessment_task import ( + AssessmentOrchestrationResultDetails, + AssessmentTaskDetails, +) +from .types.migration_entities import ( + MigrationSubtask, + MigrationTask, + MigrationTaskOrchestrationResult, + MigrationWorkflow, +) +from .types.migration_error_details import ( + ErrorDetail, + ErrorLocation, + ResourceErrorDetail, +) +from .types.migration_metrics import Point, TimeInterval, TimeSeries, TypedValue +from .types.migration_service import ( + CreateMigrationWorkflowRequest, + DeleteMigrationWorkflowRequest, + GetMigrationSubtaskRequest, + GetMigrationWorkflowRequest, + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + StartMigrationWorkflowRequest, +) +from .types.translation_service import ( + SqlTranslationError, + SqlTranslationErrorDetail, + SqlTranslationWarning, + TranslateQueryRequest, + TranslateQueryResponse, +) +from .types.translation_task import ( + BteqOptions, + DatasetReference, + Filter, + IdentifierSettings, + TeradataOptions, + TranslationFileMapping, + TranslationTaskDetails, +) + +__all__ = ( + "MigrationServiceAsyncClient", + "SqlTranslationServiceAsyncClient", + "AssessmentOrchestrationResultDetails", + "AssessmentTaskDetails", + "BteqOptions", + "CreateMigrationWorkflowRequest", + "DatasetReference", + "DeleteMigrationWorkflowRequest", + "ErrorDetail", + "ErrorLocation", + "Filter", + "GetMigrationSubtaskRequest", + "GetMigrationWorkflowRequest", + "IdentifierSettings", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + "ListMigrationWorkflowsRequest", + 
"ListMigrationWorkflowsResponse", + "MigrationServiceClient", + "MigrationSubtask", + "MigrationTask", + "MigrationTaskOrchestrationResult", + "MigrationWorkflow", + "Point", + "ResourceErrorDetail", + "SqlTranslationError", + "SqlTranslationErrorDetail", + "SqlTranslationServiceClient", + "SqlTranslationWarning", + "StartMigrationWorkflowRequest", + "TeradataOptions", + "TimeInterval", + "TimeSeries", + "TranslateQueryRequest", + "TranslateQueryResponse", + "TranslationFileMapping", + "TranslationTaskDetails", + "TypedValue", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_metadata.json b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_metadata.json new file mode 100644 index 000000000000..9eb915ecf56d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_metadata.json @@ -0,0 +1,117 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_migration_v2alpha", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "schema": "1.0", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "create_migration_workflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "delete_migration_workflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "get_migration_subtask" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "get_migration_workflow" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "list_migration_subtasks" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "list_migration_workflows" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "start_migration_workflow" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MigrationServiceAsyncClient", + "rpcs": { + 
"CreateMigrationWorkflow": { + "methods": [ + "create_migration_workflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "delete_migration_workflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "get_migration_subtask" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "get_migration_workflow" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "list_migration_subtasks" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "list_migration_workflows" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "start_migration_workflow" + ] + } + } + } + } + }, + "SqlTranslationService": { + "clients": { + "grpc": { + "libraryClient": "SqlTranslationServiceClient", + "rpcs": { + "TranslateQuery": { + "methods": [ + "translate_query" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SqlTranslationServiceAsyncClient", + "rpcs": { + "TranslateQuery": { + "methods": [ + "translate_query" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py new file mode 100644 index 000000000000..c48c3cf53d40 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/py.typed b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/py.typed new file mode 100644 index 000000000000..fcbd0b586dd2 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-migration package uses inline types. diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/__init__.py new file mode 100644 index 000000000000..c56e03eca2b3 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MigrationServiceAsyncClient +from .client import MigrationServiceClient + +__all__ = ( + "MigrationServiceClient", + "MigrationServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py new file mode 100644 index 000000000000..e0919b58dfb5 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -0,0 +1,1044 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore + +from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers +from google.cloud.bigquery_migration_v2alpha.types import ( + migration_entities, + migration_error_details, + migration_metrics, + migration_service, +) + +from .client import MigrationServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MigrationServiceTransport +from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport + + +class MigrationServiceAsyncClient: + """Service to handle EDW migrations.""" + + _client: MigrationServiceClient + + DEFAULT_ENDPOINT = MigrationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = 
MigrationServiceClient.DEFAULT_MTLS_ENDPOINT + + migration_subtask_path = staticmethod(MigrationServiceClient.migration_subtask_path) + parse_migration_subtask_path = staticmethod( + MigrationServiceClient.parse_migration_subtask_path + ) + migration_workflow_path = staticmethod( + MigrationServiceClient.migration_workflow_path + ) + parse_migration_workflow_path = staticmethod( + MigrationServiceClient.parse_migration_workflow_path + ) + common_billing_account_path = staticmethod( + MigrationServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MigrationServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MigrationServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + MigrationServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MigrationServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MigrationServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(MigrationServiceClient.common_project_path) + parse_common_project_path = staticmethod( + MigrationServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(MigrationServiceClient.common_location_path) + parse_common_location_path = staticmethod( + MigrationServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceAsyncClient: The constructed client. 
+ """ + return MigrationServiceClient.from_service_account_info.__func__(MigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceAsyncClient: The constructed client. + """ + return MigrationServiceClient.from_service_account_file.__func__(MigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MigrationServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MigrationServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MigrationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MigrationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_migration_workflow( + self, + request: Optional[ + Union[migration_service.CreateMigrationWorkflowRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Creates a migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]]): + The request object. Request to create a migration + workflow resource. + parent (:class:`str`): + Required. The name of the project to which this + migration workflow belongs. Example: + ``projects/foo/locations/bar`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_workflow (:class:`google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow`): + Required. The migration workflow to + create. + + This corresponds to the ``migration_workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, migration_workflow]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.CreateMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_workflow is not None: + request.migration_workflow = migration_workflow + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_migration_workflow, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_migration_workflow( + self, + request: Optional[ + Union[migration_service.GetMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Gets a previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest, dict]]): + The request object. A request to get a previously created + migration workflow. + name (:class:`str`): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = migration_service.GetMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_migration_workflows( + self, + request: Optional[ + Union[migration_service.ListMigrationWorkflowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationWorkflowsAsyncPager: + r"""Lists previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]]): + The request object. A request to list previously created + migration workflows. + parent (:class:`str`): + Required. The project and location of the migration + workflows to list. Example: + ``projects/123/locations/us`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager: + Response object for a ListMigrationWorkflows call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.ListMigrationWorkflowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_migration_workflows, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMigrationWorkflowsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_migration_workflow( + self, + request: Optional[ + Union[migration_service.DeleteMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a migration workflow by name. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.delete_migration_workflow(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]]): + The request object. A request to delete a previously + created migration workflow. + name (:class:`str`): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = migration_service.DeleteMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_migration_workflow, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def start_migration_workflow( + self, + request: Optional[ + Union[migration_service.StartMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.start_migration_workflow(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest, dict]]): + The request object. A request to start a previously + created migration workflow. + name (:class:`str`): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.StartMigrationWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_migration_subtask( + self, + request: Optional[ + Union[migration_service.GetMigrationSubtaskRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationSubtask: + r"""Gets a previously created migration subtask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_subtask(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest, dict]]): + The request object. A request to get a previously created + migration subtasks. + name (:class:`str`): + Required. The unique identifier for the migration + subtask. Example: + ``projects/123/locations/us/workflows/1234/subtasks/543`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask: + A subtask for a migration which + carries details about the configuration + of the subtask. The content of the + details should not matter to the end + user, but is a contract between the + subtask creator and subtask worker. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.GetMigrationSubtaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_migration_subtask, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_migration_subtasks( + self, + request: Optional[ + Union[migration_service.ListMigrationSubtasksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationSubtasksAsyncPager: + r"""Lists previously created migration subtasks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest, dict]]): + The request object. A request to list previously created + migration subtasks. + parent (:class:`str`): + Required. The migration task of the subtasks to list. + Example: ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksAsyncPager: + Response object for a ListMigrationSubtasks call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = migration_service.ListMigrationSubtasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_migration_subtasks, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMigrationSubtasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "MigrationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MigrationServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py new file mode 100644 index 000000000000..00fbce5f72fe --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -0,0 +1,1253 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore + +from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers +from google.cloud.bigquery_migration_v2alpha.types import ( + migration_entities, + migration_error_details, + migration_metrics, + migration_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, MigrationServiceTransport +from .transports.grpc import MigrationServiceGrpcTransport +from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport + + +class MigrationServiceClientMeta(type): + """Metaclass for the MigrationService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[MigrationServiceTransport]] + _transport_registry["grpc"] = MigrationServiceGrpcTransport + _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MigrationServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MigrationServiceClient(metaclass=MigrationServiceClientMeta): + """Service to handle EDW migrations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MigrationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MigrationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MigrationServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def migration_subtask_path( + project: str, + location: str, + workflow: str, + subtask: str, + ) -> str: + """Returns a fully-qualified migration_subtask string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format( + project=project, + location=location, + workflow=workflow, + subtask=subtask, + ) + + @staticmethod + def parse_migration_subtask_path(path: str) -> Dict[str, str]: + """Parses a migration_subtask path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)/subtasks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def migration_workflow_path( + project: str, + location: str, + workflow: str, + ) -> str: + """Returns a fully-qualified migration_workflow string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) + + @staticmethod + def parse_migration_workflow_path(path: str) -> Dict[str, str]: + """Parses a migration_workflow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def 
parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MigrationServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the migration service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MigrationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MigrationServiceTransport): + # transport is a MigrationServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_migration_workflow( + self, + request: Optional[ + Union[migration_service.CreateMigrationWorkflowRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Creates a migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]): + The request object. Request to create a migration + workflow resource. + parent (str): + Required. The name of the project to which this + migration workflow belongs. Example: + ``projects/foo/locations/bar`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + migration_workflow (google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow): + Required. The migration workflow to + create. + + This corresponds to the ``migration_workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, migration_workflow]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.CreateMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.CreateMigrationWorkflowRequest): + request = migration_service.CreateMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if migration_workflow is not None: + request.migration_workflow = migration_workflow + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_migration_workflow + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_migration_workflow( + self, + request: Optional[ + Union[migration_service.GetMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationWorkflow: + r"""Gets a previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest, dict]): + The request object. A request to get a previously created + migration workflow. + name (str): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: + A migration workflow which specifies + what needs to be done for an EDW + migration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.GetMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.GetMigrationWorkflowRequest): + request = migration_service.GetMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_migration_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_migration_workflows( + self, + request: Optional[ + Union[migration_service.ListMigrationWorkflowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationWorkflowsPager: + r"""Lists previously created migration workflow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]): + The request object. A request to list previously created + migration workflows. + parent (str): + Required. The project and location of the migration + workflows to list. Example: + ``projects/123/locations/us`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsPager: + Response object for a ListMigrationWorkflows call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.ListMigrationWorkflowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.ListMigrationWorkflowsRequest): + request = migration_service.ListMigrationWorkflowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_migration_workflows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMigrationWorkflowsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_migration_workflow( + self, + request: Optional[ + Union[migration_service.DeleteMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a migration workflow by name. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]): + The request object. A request to delete a previously + created migration workflow. + name (str): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.DeleteMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest): + request = migration_service.DeleteMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_migration_workflow + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def start_migration_workflow( + self, + request: Optional[ + Union[migration_service.StartMigrationWorkflowRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest, dict]): + The request object. A request to start a previously + created migration workflow. + name (str): + Required. The unique identifier for the migration + workflow. Example: + ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.StartMigrationWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, migration_service.StartMigrationWorkflowRequest): + request = migration_service.StartMigrationWorkflowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_migration_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_migration_subtask( + self, + request: Optional[ + Union[migration_service.GetMigrationSubtaskRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> migration_entities.MigrationSubtask: + r"""Gets a previously created migration subtask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest, dict]): + The request object. A request to get a previously created + migration subtasks. + name (str): + Required. The unique identifier for the migration + subtask. Example: + ``projects/123/locations/us/workflows/1234/subtasks/543`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask: + A subtask for a migration which + carries details about the configuration + of the subtask. The content of the + details should not matter to the end + user, but is a contract between the + subtask creator and subtask worker. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.GetMigrationSubtaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.GetMigrationSubtaskRequest): + request = migration_service.GetMigrationSubtaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_migration_subtask] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_migration_subtasks( + self, + request: Optional[ + Union[migration_service.ListMigrationSubtasksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMigrationSubtasksPager: + r"""Lists previously created migration subtasks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest, dict]): + The request object. A request to list previously created + migration subtasks. + parent (str): + Required. The migration task of the subtasks to list. + Example: ``projects/123/locations/us/workflows/1234`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksPager: + Response object for a ListMigrationSubtasks call. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a migration_service.ListMigrationSubtasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, migration_service.ListMigrationSubtasksRequest): + request = migration_service.ListMigrationSubtasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_migration_subtasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMigrationSubtasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MigrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MigrationServiceClient",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/pagers.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/pagers.py new file mode 100644 index 000000000000..c0d478c7e81e --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/pagers.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery_migration_v2alpha.types import ( + migration_entities, + migration_service, +) + + +class ListMigrationWorkflowsPager: + """A pager for iterating through ``list_migration_workflows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``migration_workflows`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListMigrationWorkflows`` requests and continue to iterate + through the ``migration_workflows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., migration_service.ListMigrationWorkflowsResponse], + request: migration_service.ListMigrationWorkflowsRequest, + response: migration_service.ListMigrationWorkflowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = migration_service.ListMigrationWorkflowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[migration_service.ListMigrationWorkflowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[migration_entities.MigrationWorkflow]: + for page in self.pages: + yield from page.migration_workflows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMigrationWorkflowsAsyncPager: + """A pager for iterating through ``list_migration_workflows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``migration_workflows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMigrationWorkflows`` requests and continue to iterate + through the ``migration_workflows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[migration_service.ListMigrationWorkflowsResponse] + ], + request: migration_service.ListMigrationWorkflowsRequest, + response: migration_service.ListMigrationWorkflowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = migration_service.ListMigrationWorkflowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[migration_service.ListMigrationWorkflowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[migration_entities.MigrationWorkflow]: + async def async_generator(): + async for page in self.pages: + for response in page.migration_workflows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMigrationSubtasksPager: + """A pager for iterating through ``list_migration_subtasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``migration_subtasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMigrationSubtasks`` requests and continue to iterate + through the ``migration_subtasks`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., migration_service.ListMigrationSubtasksResponse], + request: migration_service.ListMigrationSubtasksRequest, + response: migration_service.ListMigrationSubtasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = migration_service.ListMigrationSubtasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[migration_service.ListMigrationSubtasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[migration_entities.MigrationSubtask]: + for page in self.pages: + yield from page.migration_subtasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMigrationSubtasksAsyncPager: + """A pager for iterating through ``list_migration_subtasks`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``migration_subtasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMigrationSubtasks`` requests and continue to iterate + through the ``migration_subtasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[migration_service.ListMigrationSubtasksResponse] + ], + request: migration_service.ListMigrationSubtasksRequest, + response: migration_service.ListMigrationSubtasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest): + The initial request object. + response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = migration_service.ListMigrationSubtasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[migration_service.ListMigrationSubtasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[migration_entities.MigrationSubtask]: + async def async_generator(): + async for page in self.pages: + for response in page.migration_subtasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/__init__.py new file mode 100644 index 000000000000..b4ab5eab1913 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MigrationServiceTransport +from .grpc import MigrationServiceGrpcTransport +from .grpc_asyncio import MigrationServiceGrpcAsyncIOTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MigrationServiceTransport]] +_transport_registry["grpc"] = MigrationServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport + +__all__ = ( + "MigrationServiceTransport", + "MigrationServiceGrpcTransport", + "MigrationServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py new file mode 100644 index 000000000000..8b71e3ea2f46 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version +from google.cloud.bigquery_migration_v2alpha.types import ( + migration_entities, + migration_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MigrationServiceTransport(abc.ABC): + """Abstract transport class for MigrationService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "bigquerymigration.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_migration_workflow: gapic_v1.method.wrap_method( + self.create_migration_workflow, + default_timeout=60.0, + client_info=client_info, + ), + self.get_migration_workflow: gapic_v1.method.wrap_method( + self.get_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.list_migration_workflows: gapic_v1.method.wrap_method( + self.list_migration_workflows, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.delete_migration_workflow: gapic_v1.method.wrap_method( + self.delete_migration_workflow, + default_timeout=60.0, + client_info=client_info, + ), + self.start_migration_workflow: gapic_v1.method.wrap_method( + self.start_migration_workflow, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.get_migration_subtask: gapic_v1.method.wrap_method( + self.get_migration_subtask, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + self.list_migration_subtasks: gapic_v1.method.wrap_method( + self.list_migration_subtasks, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_migration_workflow( + self, + ) -> Callable[ + [migration_service.CreateMigrationWorkflowRequest], + Union[ + migration_entities.MigrationWorkflow, + Awaitable[migration_entities.MigrationWorkflow], + ], + ]: + raise NotImplementedError() + + @property + def get_migration_workflow( + self, + ) -> Callable[ + [migration_service.GetMigrationWorkflowRequest], + Union[ + migration_entities.MigrationWorkflow, + Awaitable[migration_entities.MigrationWorkflow], + ], + ]: + raise NotImplementedError() + + @property + def list_migration_workflows( + self, + ) -> Callable[ + [migration_service.ListMigrationWorkflowsRequest], + Union[ + migration_service.ListMigrationWorkflowsResponse, + Awaitable[migration_service.ListMigrationWorkflowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_migration_workflow( + self, + ) -> Callable[ + [migration_service.DeleteMigrationWorkflowRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def start_migration_workflow( + self, + ) -> Callable[ + [migration_service.StartMigrationWorkflowRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_migration_subtask( + self, + ) -> Callable[ + [migration_service.GetMigrationSubtaskRequest], + Union[ + migration_entities.MigrationSubtask, + Awaitable[migration_entities.MigrationSubtask], + ], + ]: + raise NotImplementedError() + + @property + def list_migration_subtasks( + self, + ) -> Callable[ + 
[migration_service.ListMigrationSubtasksRequest], + Union[ + migration_service.ListMigrationSubtasksResponse, + Awaitable[migration_service.ListMigrationSubtasksResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MigrationServiceTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py new file mode 100644 index 000000000000..3aeaacdf7875 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import ( + migration_entities, + migration_service, +) + +from .base import DEFAULT_CLIENT_INFO, MigrationServiceTransport + + +class MigrationServiceGrpcTransport(MigrationServiceTransport): + """gRPC backend transport for MigrationService. + + Service to handle EDW migrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_migration_workflow( + self, + ) -> Callable[ + [migration_service.CreateMigrationWorkflowRequest], + migration_entities.MigrationWorkflow, + ]: + r"""Return a callable for the create migration workflow method over gRPC. + + Creates a migration workflow. + + Returns: + Callable[[~.CreateMigrationWorkflowRequest], + ~.MigrationWorkflow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_migration_workflow" not in self._stubs: + self._stubs["create_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow", + request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["create_migration_workflow"] + + @property + def get_migration_workflow( + self, + ) -> Callable[ + [migration_service.GetMigrationWorkflowRequest], + migration_entities.MigrationWorkflow, + ]: + r"""Return a callable for the get migration workflow method over gRPC. + + Gets a previously created migration workflow. + + Returns: + Callable[[~.GetMigrationWorkflowRequest], + ~.MigrationWorkflow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_migration_workflow" not in self._stubs: + self._stubs["get_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow", + request_serializer=migration_service.GetMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["get_migration_workflow"] + + @property + def list_migration_workflows( + self, + ) -> Callable[ + [migration_service.ListMigrationWorkflowsRequest], + migration_service.ListMigrationWorkflowsResponse, + ]: + r"""Return a callable for the list migration workflows method over gRPC. + + Lists previously created migration workflow. + + Returns: + Callable[[~.ListMigrationWorkflowsRequest], + ~.ListMigrationWorkflowsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_workflows" not in self._stubs: + self._stubs["list_migration_workflows"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows", + request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, + response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, + ) + return self._stubs["list_migration_workflows"] + + @property + def delete_migration_workflow( + self, + ) -> Callable[[migration_service.DeleteMigrationWorkflowRequest], empty_pb2.Empty]: + r"""Return a callable for the delete migration workflow method over gRPC. + + Deletes a migration workflow by name. + + Returns: + Callable[[~.DeleteMigrationWorkflowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_migration_workflow" not in self._stubs: + self._stubs["delete_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow", + request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_migration_workflow"] + + @property + def start_migration_workflow( + self, + ) -> Callable[[migration_service.StartMigrationWorkflowRequest], empty_pb2.Empty]: + r"""Return a callable for the start migration workflow method over gRPC. + + Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. 
This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + Returns: + Callable[[~.StartMigrationWorkflowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_migration_workflow" not in self._stubs: + self._stubs["start_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow", + request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["start_migration_workflow"] + + @property + def get_migration_subtask( + self, + ) -> Callable[ + [migration_service.GetMigrationSubtaskRequest], + migration_entities.MigrationSubtask, + ]: + r"""Return a callable for the get migration subtask method over gRPC. + + Gets a previously created migration subtask. + + Returns: + Callable[[~.GetMigrationSubtaskRequest], + ~.MigrationSubtask]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_migration_subtask" not in self._stubs: + self._stubs["get_migration_subtask"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask", + request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, + response_deserializer=migration_entities.MigrationSubtask.deserialize, + ) + return self._stubs["get_migration_subtask"] + + @property + def list_migration_subtasks( + self, + ) -> Callable[ + [migration_service.ListMigrationSubtasksRequest], + migration_service.ListMigrationSubtasksResponse, + ]: + r"""Return a callable for the list migration subtasks method over gRPC. + + Lists previously created migration subtasks. + + Returns: + Callable[[~.ListMigrationSubtasksRequest], + ~.ListMigrationSubtasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_migration_subtasks" not in self._stubs: + self._stubs["list_migration_subtasks"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks", + request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, + response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, + ) + return self._stubs["list_migration_subtasks"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MigrationServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e4ff6dfe7e8f --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import ( + migration_entities, + migration_service, +) + +from .base import DEFAULT_CLIENT_INFO, MigrationServiceTransport +from .grpc import MigrationServiceGrpcTransport + + +class MigrationServiceGrpcAsyncIOTransport(MigrationServiceTransport): + """gRPC AsyncIO backend transport for MigrationService. + + Service to handle EDW migrations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_migration_workflow( + self, + ) -> Callable[ + [migration_service.CreateMigrationWorkflowRequest], + Awaitable[migration_entities.MigrationWorkflow], + ]: + r"""Return a callable for the create migration workflow method over gRPC. + + Creates a migration workflow. 
+ + Returns: + Callable[[~.CreateMigrationWorkflowRequest], + Awaitable[~.MigrationWorkflow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_migration_workflow" not in self._stubs: + self._stubs["create_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow", + request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["create_migration_workflow"] + + @property + def get_migration_workflow( + self, + ) -> Callable[ + [migration_service.GetMigrationWorkflowRequest], + Awaitable[migration_entities.MigrationWorkflow], + ]: + r"""Return a callable for the get migration workflow method over gRPC. + + Gets a previously created migration workflow. + + Returns: + Callable[[~.GetMigrationWorkflowRequest], + Awaitable[~.MigrationWorkflow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_migration_workflow" not in self._stubs: + self._stubs["get_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow", + request_serializer=migration_service.GetMigrationWorkflowRequest.serialize, + response_deserializer=migration_entities.MigrationWorkflow.deserialize, + ) + return self._stubs["get_migration_workflow"] + + @property + def list_migration_workflows( + self, + ) -> Callable[ + [migration_service.ListMigrationWorkflowsRequest], + Awaitable[migration_service.ListMigrationWorkflowsResponse], + ]: + r"""Return a callable for the list migration workflows method over gRPC. + + Lists previously created migration workflow. + + Returns: + Callable[[~.ListMigrationWorkflowsRequest], + Awaitable[~.ListMigrationWorkflowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_workflows" not in self._stubs: + self._stubs["list_migration_workflows"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows", + request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, + response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, + ) + return self._stubs["list_migration_workflows"] + + @property + def delete_migration_workflow( + self, + ) -> Callable[ + [migration_service.DeleteMigrationWorkflowRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete migration workflow method over gRPC. + + Deletes a migration workflow by name. + + Returns: + Callable[[~.DeleteMigrationWorkflowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_migration_workflow" not in self._stubs: + self._stubs["delete_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow", + request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_migration_workflow"] + + @property + def start_migration_workflow( + self, + ) -> Callable[ + [migration_service.StartMigrationWorkflowRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the start migration workflow method over gRPC. + + Starts a previously created migration workflow. I.e., + the state transitions from DRAFT to RUNNING. This is a + no-op if the state is already RUNNING. An error will be + signaled if the state is anything other than DRAFT or + RUNNING. + + Returns: + Callable[[~.StartMigrationWorkflowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "start_migration_workflow" not in self._stubs: + self._stubs["start_migration_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow", + request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["start_migration_workflow"] + + @property + def get_migration_subtask( + self, + ) -> Callable[ + [migration_service.GetMigrationSubtaskRequest], + Awaitable[migration_entities.MigrationSubtask], + ]: + r"""Return a callable for the get migration subtask method over gRPC. + + Gets a previously created migration subtask. + + Returns: + Callable[[~.GetMigrationSubtaskRequest], + Awaitable[~.MigrationSubtask]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_migration_subtask" not in self._stubs: + self._stubs["get_migration_subtask"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask", + request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, + response_deserializer=migration_entities.MigrationSubtask.deserialize, + ) + return self._stubs["get_migration_subtask"] + + @property + def list_migration_subtasks( + self, + ) -> Callable[ + [migration_service.ListMigrationSubtasksRequest], + Awaitable[migration_service.ListMigrationSubtasksResponse], + ]: + r"""Return a callable for the list migration subtasks method over gRPC. + + Lists previously created migration subtasks. + + Returns: + Callable[[~.ListMigrationSubtasksRequest], + Awaitable[~.ListMigrationSubtasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_migration_subtasks" not in self._stubs: + self._stubs["list_migration_subtasks"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks", + request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, + response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, + ) + return self._stubs["list_migration_subtasks"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MigrationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/__init__.py new file mode 100644 index 000000000000..d8cccbd363ff --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SqlTranslationServiceAsyncClient +from .client import SqlTranslationServiceClient + +__all__ = ( + "SqlTranslationServiceClient", + "SqlTranslationServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py new file mode 100644 index 000000000000..ca9b5860f570 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import translation_service + +from .client import SqlTranslationServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SqlTranslationServiceTransport +from .transports.grpc_asyncio import SqlTranslationServiceGrpcAsyncIOTransport + + +class SqlTranslationServiceAsyncClient: + """Provides other SQL dialects to GoogleSQL translation + operations. 
+ """ + + _client: SqlTranslationServiceClient + + DEFAULT_ENDPOINT = SqlTranslationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + SqlTranslationServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SqlTranslationServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SqlTranslationServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SqlTranslationServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SqlTranslationServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SqlTranslationServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SqlTranslationServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SqlTranslationServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + SqlTranslationServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + SqlTranslationServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SqlTranslationServiceAsyncClient: The constructed client. + """ + return SqlTranslationServiceClient.from_service_account_info.__func__(SqlTranslationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SqlTranslationServiceAsyncClient: The constructed client. + """ + return SqlTranslationServiceClient.from_service_account_file.__func__(SqlTranslationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return SqlTranslationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SqlTranslationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SqlTranslationServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(SqlTranslationServiceClient).get_transport_class, + type(SqlTranslationServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SqlTranslationServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sql translation service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SqlTranslationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SqlTranslationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def translate_query( + self, + request: Optional[ + Union[translation_service.TranslateQueryRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + source_dialect: Optional[ + translation_service.TranslateQueryRequest.SqlTranslationSourceDialect + ] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.TranslateQueryResponse: + r"""Translates input queries from source dialects to + GoogleSQL. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + async def sample_translate_query(): + # Create a client + client = bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.TranslateQueryRequest( + parent="parent_value", + source_dialect="TERADATA", + query="query_value", + ) + + # Make the request + response = await client.translate_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest, dict]]): + The request object. The request of translating a SQL + query to Standard SQL. + parent (:class:`str`): + Required. The name of the project to which this + translation request belongs. Example: + ``projects/foo/locations/bar`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source_dialect (:class:`google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect`): + Required. The source SQL dialect of ``queries``. + This corresponds to the ``source_dialect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Required. The query to be translated. + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse: + The response of translating a SQL + query to Standard SQL. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, source_dialect, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = translation_service.TranslateQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if source_dialect is not None: + request.source_dialect = source_dialect + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.translate_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SqlTranslationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SqlTranslationServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py new file mode 100644 index 000000000000..f122320a91c5 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py @@ -0,0 +1,568 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import translation_service + +from .transports.base import DEFAULT_CLIENT_INFO, SqlTranslationServiceTransport +from .transports.grpc import SqlTranslationServiceGrpcTransport +from .transports.grpc_asyncio import SqlTranslationServiceGrpcAsyncIOTransport + + +class SqlTranslationServiceClientMeta(type): + """Metaclass for the SqlTranslationService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SqlTranslationServiceTransport]] + _transport_registry["grpc"] = SqlTranslationServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SqlTranslationServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SqlTranslationServiceTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SqlTranslationServiceClient(metaclass=SqlTranslationServiceClientMeta): + """Provides other SQL dialects to GoogleSQL translation + operations. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + SqlTranslationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SqlTranslationServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SqlTranslationServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SqlTranslationServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SqlTranslationServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sql translation service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, SqlTranslationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SqlTranslationServiceTransport): + # transport is a SqlTranslationServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def translate_query( + self, + request: Optional[ + Union[translation_service.TranslateQueryRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + source_dialect: Optional[ + translation_service.TranslateQueryRequest.SqlTranslationSourceDialect + ] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> translation_service.TranslateQueryResponse: + r"""Translates input queries from source dialects to + GoogleSQL. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_migration_v2alpha + + def sample_translate_query(): + # Create a client + client = bigquery_migration_v2alpha.SqlTranslationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.TranslateQueryRequest( + parent="parent_value", + source_dialect="TERADATA", + query="query_value", + ) + + # Make the request + response = client.translate_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest, dict]): + The request object. The request of translating a SQL + query to Standard SQL. + parent (str): + Required. The name of the project to which this + translation request belongs. Example: + ``projects/foo/locations/bar`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source_dialect (google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect): + Required. The source SQL dialect of ``queries``. + This corresponds to the ``source_dialect`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Required. The query to be translated. + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse: + The response of translating a SQL + query to Standard SQL. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, source_dialect, query]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a translation_service.TranslateQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, translation_service.TranslateQueryRequest): + request = translation_service.TranslateQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if source_dialect is not None: + request.source_dialect = source_dialect + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.translate_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SqlTranslationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SqlTranslationServiceClient",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/__init__.py new file mode 100644 index 000000000000..45b3195423d8 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SqlTranslationServiceTransport +from .grpc import SqlTranslationServiceGrpcTransport +from .grpc_asyncio import SqlTranslationServiceGrpcAsyncIOTransport + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SqlTranslationServiceTransport]] +_transport_registry["grpc"] = SqlTranslationServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SqlTranslationServiceGrpcAsyncIOTransport + +__all__ = ( + "SqlTranslationServiceTransport", + "SqlTranslationServiceGrpcTransport", + "SqlTranslationServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/base.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/base.py new file mode 100644 index 000000000000..922e95b80fdd --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/base.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version +from google.cloud.bigquery_migration_v2alpha.types import translation_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SqlTranslationServiceTransport(abc.ABC): + """Abstract transport class for SqlTranslationService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "bigquerymigration.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.translate_query: gapic_v1.method.wrap_method( + self.translate_query, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def translate_query( + self, + ) -> Callable[ + [translation_service.TranslateQueryRequest], + Union[ + translation_service.TranslateQueryResponse, + Awaitable[translation_service.TranslateQueryResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SqlTranslationServiceTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc.py new file mode 100644 index 000000000000..3f1e2981f6c2 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import translation_service + +from .base import DEFAULT_CLIENT_INFO, SqlTranslationServiceTransport + + +class SqlTranslationServiceGrpcTransport(SqlTranslationServiceTransport): + """gRPC backend transport for SqlTranslationService. + + Provides other SQL dialects to GoogleSQL translation + operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def translate_query( + self, + ) -> Callable[ + [translation_service.TranslateQueryRequest], + translation_service.TranslateQueryResponse, + ]: + r"""Return a callable for the translate query method over gRPC. + + Translates input queries from source dialects to + GoogleSQL. + + Returns: + Callable[[~.TranslateQueryRequest], + ~.TranslateQueryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "translate_query" not in self._stubs: + self._stubs["translate_query"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.SqlTranslationService/TranslateQuery", + request_serializer=translation_service.TranslateQueryRequest.serialize, + response_deserializer=translation_service.TranslateQueryResponse.deserialize, + ) + return self._stubs["translate_query"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SqlTranslationServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..5d744b22d4ef --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc_asyncio.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import translation_service + +from .base import DEFAULT_CLIENT_INFO, SqlTranslationServiceTransport +from .grpc import SqlTranslationServiceGrpcTransport + + +class SqlTranslationServiceGrpcAsyncIOTransport(SqlTranslationServiceTransport): + """gRPC AsyncIO backend transport for SqlTranslationService. + + Provides other SQL dialects to GoogleSQL translation + operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "bigquerymigration.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def translate_query( + self, + ) -> Callable[ + [translation_service.TranslateQueryRequest], + Awaitable[translation_service.TranslateQueryResponse], + ]: + r"""Return a callable for the translate query method over gRPC. + + Translates input queries from source dialects to + GoogleSQL. + + Returns: + Callable[[~.TranslateQueryRequest], + Awaitable[~.TranslateQueryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "translate_query" not in self._stubs: + self._stubs["translate_query"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.migration.v2alpha.SqlTranslationService/TranslateQuery", + request_serializer=translation_service.TranslateQueryRequest.serialize, + response_deserializer=translation_service.TranslateQueryResponse.deserialize, + ) + return self._stubs["translate_query"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("SqlTranslationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/__init__.py new file mode 100644 index 000000000000..361046d192bd --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .assessment_task import AssessmentOrchestrationResultDetails, AssessmentTaskDetails +from .migration_entities import ( + MigrationSubtask, + MigrationTask, + MigrationTaskOrchestrationResult, + MigrationWorkflow, +) +from .migration_error_details import ErrorDetail, ErrorLocation, ResourceErrorDetail +from .migration_metrics import Point, TimeInterval, TimeSeries, TypedValue +from .migration_service import ( + CreateMigrationWorkflowRequest, + DeleteMigrationWorkflowRequest, + GetMigrationSubtaskRequest, + GetMigrationWorkflowRequest, + ListMigrationSubtasksRequest, + ListMigrationSubtasksResponse, + ListMigrationWorkflowsRequest, + ListMigrationWorkflowsResponse, + StartMigrationWorkflowRequest, +) +from .translation_service import ( + SqlTranslationError, + SqlTranslationErrorDetail, + SqlTranslationWarning, + TranslateQueryRequest, + TranslateQueryResponse, +) +from .translation_task import ( + BteqOptions, + DatasetReference, + Filter, + IdentifierSettings, + TeradataOptions, + TranslationFileMapping, + TranslationTaskDetails, +) + +__all__ = ( + "AssessmentOrchestrationResultDetails", + "AssessmentTaskDetails", + "MigrationSubtask", + "MigrationTask", + "MigrationTaskOrchestrationResult", + "MigrationWorkflow", + "ErrorDetail", + "ErrorLocation", + "ResourceErrorDetail", + "Point", + "TimeInterval", + "TimeSeries", + "TypedValue", + "CreateMigrationWorkflowRequest", + "DeleteMigrationWorkflowRequest", + "GetMigrationSubtaskRequest", + "GetMigrationWorkflowRequest", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + "ListMigrationWorkflowsRequest", + "ListMigrationWorkflowsResponse", + "StartMigrationWorkflowRequest", + "SqlTranslationError", + "SqlTranslationErrorDetail", + "SqlTranslationWarning", + "TranslateQueryRequest", + "TranslateQueryResponse", + "BteqOptions", + "DatasetReference", + "Filter", + "IdentifierSettings", + "TeradataOptions", + "TranslationFileMapping", + "TranslationTaskDetails", +) diff --git 
a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py new file mode 100644 index 000000000000..f7ecf68edd17 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "AssessmentTaskDetails", + "AssessmentOrchestrationResultDetails", + }, +) + + +class AssessmentTaskDetails(proto.Message): + r"""Assessment task config. + + Attributes: + input_path (str): + Required. The Cloud Storage path for + assessment input files. + output_dataset (str): + Required. The BigQuery dataset for output. + querylogs_path (str): + Optional. An optional Cloud Storage path to + write the query logs (which is then used as an + input path on the translation task) + data_source (str): + Required. The data source or data warehouse + type (eg: TERADATA/REDSHIFT) from which the + input data is extracted. 
+ """ + + input_path: str = proto.Field( + proto.STRING, + number=1, + ) + output_dataset: str = proto.Field( + proto.STRING, + number=2, + ) + querylogs_path: str = proto.Field( + proto.STRING, + number=3, + ) + data_source: str = proto.Field( + proto.STRING, + number=4, + ) + + +class AssessmentOrchestrationResultDetails(proto.Message): + r"""Details for an assessment task orchestration result. + + Attributes: + output_tables_schema_version (str): + Optional. The version used for the output + table schemas. + """ + + output_tables_schema_version: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py new file mode 100644 index 000000000000..0d99fc024ccb --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import ( + assessment_task, + migration_error_details, + migration_metrics, + translation_task, +) + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "MigrationWorkflow", + "MigrationTask", + "MigrationSubtask", + "MigrationTaskOrchestrationResult", + }, +) + + +class MigrationWorkflow(proto.Message): + r"""A migration workflow which specifies what needs to be done + for an EDW migration. + + Attributes: + name (str): + Output only. Immutable. The unique identifier for the + migration workflow. The ID is server-generated. + + Example: ``projects/123/locations/us/workflows/345`` + display_name (str): + The display name of the workflow. This can be + set to give a workflow a descriptive name. There + is no guarantee or enforcement of uniqueness. + tasks (MutableMapping[str, google.cloud.bigquery_migration_v2alpha.types.MigrationTask]): + The tasks in a workflow in a named map. The + name (i.e. key) has no meaning and is merely a + convenient way to address a specific task in a + workflow. + state (google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow.State): + Output only. That status of the workflow. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the workflow was created. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the workflow was last updated. + """ + + class State(proto.Enum): + r"""Possible migration workflow states. + + Values: + STATE_UNSPECIFIED (0): + Workflow state is unspecified. + DRAFT (1): + Workflow is in draft status, i.e. tasks are + not yet eligible for execution. 
+ RUNNING (2): + Workflow is running (i.e. tasks are eligible + for execution). + PAUSED (3): + Workflow is paused. Tasks currently in + progress may continue, but no further tasks will + be scheduled. + COMPLETED (4): + Workflow is complete. There should not be any + task in a non-terminal state, but if they are + (e.g. forced termination), they will not be + scheduled. + """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + RUNNING = 2 + PAUSED = 3 + COMPLETED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + tasks: MutableMapping[str, "MigrationTask"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="MigrationTask", + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class MigrationTask(proto.Message): + r"""A single task for a migration which has details about the + configuration of the task. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + assessment_task_details (google.cloud.bigquery_migration_v2alpha.types.AssessmentTaskDetails): + Task configuration for Assessment. + + This field is a member of `oneof`_ ``task_details``. + translation_task_details (google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails): + Task configuration for Batch/Offline SQL + Translation. + + This field is a member of `oneof`_ ``task_details``. + id (str): + Output only. Immutable. 
The unique identifier + for the migration task. The ID is + server-generated. + type_ (str): + The type of the task. This must be a + supported task type. + details (google.protobuf.any_pb2.Any): + DEPRECATED! Use one of the task_details below. The details + of the task. The type URL must be one of the supported task + details messages and correspond to the Task's type. + state (google.cloud.bigquery_migration_v2alpha.types.MigrationTask.State): + Output only. The current state of the task. + processing_error (google.rpc.error_details_pb2.ErrorInfo): + Output only. An explanation that may be + populated when the task is in FAILED state. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the task was created. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the task was last updated. + orchestration_result (google.cloud.bigquery_migration_v2alpha.types.MigrationTaskOrchestrationResult): + Output only. Additional information about the + orchestration. + """ + + class State(proto.Enum): + r"""Possible states of a migration task. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. + PENDING (1): + The task is waiting for orchestration. + ORCHESTRATING (2): + The task is assigned to an orchestrator. + RUNNING (3): + The task is running, i.e. its subtasks are + ready for execution. + PAUSED (4): + Tha task is paused. Assigned subtasks can + continue, but no new subtasks will be scheduled. + SUCCEEDED (5): + The task finished successfully. + FAILED (6): + The task finished unsuccessfully. 
+ """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + ORCHESTRATING = 2 + RUNNING = 3 + PAUSED = 4 + SUCCEEDED = 5 + FAILED = 6 + + assessment_task_details: assessment_task.AssessmentTaskDetails = proto.Field( + proto.MESSAGE, + number=12, + oneof="task_details", + message=assessment_task.AssessmentTaskDetails, + ) + translation_task_details: translation_task.TranslationTaskDetails = proto.Field( + proto.MESSAGE, + number=13, + oneof="task_details", + message=translation_task.TranslationTaskDetails, + ) + id: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + details: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=3, + message=any_pb2.Any, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + processing_error: error_details_pb2.ErrorInfo = proto.Field( + proto.MESSAGE, + number=5, + message=error_details_pb2.ErrorInfo, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + orchestration_result: "MigrationTaskOrchestrationResult" = proto.Field( + proto.MESSAGE, + number=10, + message="MigrationTaskOrchestrationResult", + ) + + +class MigrationSubtask(proto.Message): + r"""A subtask for a migration which carries details about the + configuration of the subtask. The content of the details should + not matter to the end user, but is a contract between the + subtask creator and subtask worker. + + Attributes: + name (str): + Output only. Immutable. The resource name for the migration + subtask. The ID is server-generated. + + Example: + ``projects/123/locations/us/workflows/345/subtasks/678`` + task_id (str): + The unique ID of the task to which this + subtask belongs. + type_ (str): + The type of the Subtask. The migration + service does not check whether this is a known + type. 
It is up to the task creator (i.e. + orchestrator or worker) to ensure it only + creates subtasks for which there are compatible + workers polling for Subtasks. + state (google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask.State): + Output only. The current state of the + subtask. + processing_error (google.rpc.error_details_pb2.ErrorInfo): + Output only. An explanation that may be + populated when the task is in FAILED state. + resource_error_details (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.ResourceErrorDetail]): + Output only. Provides details to errors and + issues encountered while processing the subtask. + Presence of error details does not mean that the + subtask failed. + resource_error_count (int): + The number or resources with errors. Note: This is not the + total number of errors as each resource can have more than + one error. This is used to indicate truncation by having a + ``resource_error_count`` that is higher than the size of + ``resource_error_details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the subtask was created. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the subtask was last updated. + metrics (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.TimeSeries]): + The metrics for the subtask. + """ + + class State(proto.Enum): + r"""Possible states of a migration subtask. + + Values: + STATE_UNSPECIFIED (0): + The state is unspecified. + ACTIVE (1): + The subtask is ready, i.e. it is ready for + execution. + RUNNING (2): + The subtask is running, i.e. it is assigned + to a worker for execution. + SUCCEEDED (3): + The subtask finished successfully. + FAILED (4): + The subtask finished unsuccessfully. + PAUSED (5): + The subtask is paused, i.e., it will not be + scheduled. If it was already assigned,it might + still finish but no new lease renewals will be + granted. 
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + RUNNING = 2 + SUCCEEDED = 3 + FAILED = 4 + PAUSED = 5 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + task_id: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + processing_error: error_details_pb2.ErrorInfo = proto.Field( + proto.MESSAGE, + number=6, + message=error_details_pb2.ErrorInfo, + ) + resource_error_details: MutableSequence[ + migration_error_details.ResourceErrorDetail + ] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=migration_error_details.ResourceErrorDetail, + ) + resource_error_count: int = proto.Field( + proto.INT32, + number=13, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + metrics: MutableSequence[migration_metrics.TimeSeries] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message=migration_metrics.TimeSeries, + ) + + +class MigrationTaskOrchestrationResult(proto.Message): + r"""Additional information from the orchestrator when it is done + with the task orchestration. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + assessment_details (google.cloud.bigquery_migration_v2alpha.types.AssessmentOrchestrationResultDetails): + Details specific to assessment task types. + + This field is a member of `oneof`_ ``details``. 
+ """ + + assessment_details: assessment_task.AssessmentOrchestrationResultDetails = ( + proto.Field( + proto.MESSAGE, + number=1, + oneof="details", + message=assessment_task.AssessmentOrchestrationResultDetails, + ) + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_error_details.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_error_details.py new file mode 100644 index 000000000000..26ddd745ff93 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_error_details.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.rpc import error_details_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "ResourceErrorDetail", + "ErrorDetail", + "ErrorLocation", + }, +) + + +class ResourceErrorDetail(proto.Message): + r"""Provides details for errors and the corresponding resources. + + Attributes: + resource_info (google.rpc.error_details_pb2.ResourceInfo): + Required. Information about the resource + where the error is located. 
+ error_details (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.ErrorDetail]): + Required. The error details for the resource. + error_count (int): + Required. How many errors there are in total for the + resource. Truncation can be indicated by having an + ``error_count`` that is higher than the size of + ``error_details``. + """ + + resource_info: error_details_pb2.ResourceInfo = proto.Field( + proto.MESSAGE, + number=1, + message=error_details_pb2.ResourceInfo, + ) + error_details: MutableSequence["ErrorDetail"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ErrorDetail", + ) + error_count: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ErrorDetail(proto.Message): + r"""Provides details for errors, e.g. issues that where + encountered when processing a subtask. + + Attributes: + location (google.cloud.bigquery_migration_v2alpha.types.ErrorLocation): + Optional. The exact location within the + resource (if applicable). + error_info (google.rpc.error_details_pb2.ErrorInfo): + Required. Describes the cause of the error + with structured detail. + """ + + location: "ErrorLocation" = proto.Field( + proto.MESSAGE, + number=1, + message="ErrorLocation", + ) + error_info: error_details_pb2.ErrorInfo = proto.Field( + proto.MESSAGE, + number=2, + message=error_details_pb2.ErrorInfo, + ) + + +class ErrorLocation(proto.Message): + r"""Holds information about where the error is located. + + Attributes: + line (int): + Optional. If applicable, denotes the line + where the error occurred. A zero value means + that there is no line information. + column (int): + Optional. If applicable, denotes the column + where the error occurred. A zero value means + that there is no columns information. 
+ """ + + line: int = proto.Field( + proto.INT32, + number=1, + ) + column: int = proto.Field( + proto.INT32, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py new file mode 100644 index 000000000000..cfcd595a48d3 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py @@ -0,0 +1,212 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "TimeSeries", + "Point", + "TimeInterval", + "TypedValue", + }, +) + + +class TimeSeries(proto.Message): + r"""The metrics object for a SubTask. + + Attributes: + metric (str): + Required. The name of the metric. + + If the metric is not known by the service yet, + it will be auto-created. + value_type (google.api.metric_pb2.ValueType): + Required. The value type of the time series. 
+ metric_kind (google.api.metric_pb2.MetricKind): + Optional. The metric kind of the time series. + + If present, it must be the same as the metric kind of the + associated metric. If the associated metric's descriptor + must be auto-created, then this field specifies the metric + kind of the new descriptor and must be either ``GAUGE`` (the + default) or ``CUMULATIVE``. + points (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.Point]): + Required. The data points of this time series. When listing + time series, points are returned in reverse time order. + + When creating a time series, this field must contain exactly + one point and the point's type must be the same as the value + type of the associated metric. If the associated metric's + descriptor must be auto-created, then the value type of the + descriptor is determined by the point's type, which must be + ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``. + """ + + metric: str = proto.Field( + proto.STRING, + number=1, + ) + value_type: metric_pb2.MetricDescriptor.ValueType = proto.Field( + proto.ENUM, + number=2, + enum=metric_pb2.MetricDescriptor.ValueType, + ) + metric_kind: metric_pb2.MetricDescriptor.MetricKind = proto.Field( + proto.ENUM, + number=3, + enum=metric_pb2.MetricDescriptor.MetricKind, + ) + points: MutableSequence["Point"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Point", + ) + + +class Point(proto.Message): + r"""A single data point in a time series. + + Attributes: + interval (google.cloud.bigquery_migration_v2alpha.types.TimeInterval): + The time interval to which the data point applies. For + ``GAUGE`` metrics, the start time does not need to be + supplied, but if it is supplied, it must equal the end time. + For ``DELTA`` metrics, the start and end time should specify + a non-zero interval, with subsequent points specifying + contiguous and non-overlapping intervals. 
For ``CUMULATIVE`` + metrics, the start and end time should specify a non-zero + interval, with subsequent points specifying the same start + time and increasing end times, until an event resets the + cumulative value to zero and sets a new start time for the + following points. + value (google.cloud.bigquery_migration_v2alpha.types.TypedValue): + The value of the data point. + """ + + interval: "TimeInterval" = proto.Field( + proto.MESSAGE, + number=1, + message="TimeInterval", + ) + value: "TypedValue" = proto.Field( + proto.MESSAGE, + number=2, + message="TypedValue", + ) + + +class TimeInterval(proto.Message): + r"""A time interval extending just after a start time through an + end time. If the start time is the same as the end time, then + the interval represents a single point in time. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The beginning of the time interval. + The default value for the start time is the end + time. The start time must not be later than the + end time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The end of the time interval. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class TypedValue(proto.Message): + r"""A single strongly-typed value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bool_value (bool): + A Boolean value: ``true`` or ``false``. + + This field is a member of `oneof`_ ``value``. + int64_value (int): + A 64-bit integer. Its range is approximately + +/-9.2x10^18. 
+ + This field is a member of `oneof`_ ``value``. + double_value (float): + A 64-bit double-precision floating-point + number. Its magnitude is approximately + +/-10^(+/-300) and it has 16 significant digits + of precision. + + This field is a member of `oneof`_ ``value``. + string_value (str): + A variable-length string value. + + This field is a member of `oneof`_ ``value``. + distribution_value (google.api.distribution_pb2.Distribution): + A distribution value. + + This field is a member of `oneof`_ ``value``. + """ + + bool_value: bool = proto.Field( + proto.BOOL, + number=1, + oneof="value", + ) + int64_value: int = proto.Field( + proto.INT64, + number=2, + oneof="value", + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=4, + oneof="value", + ) + distribution_value: distribution_pb2.Distribution = proto.Field( + proto.MESSAGE, + number=5, + oneof="value", + message=distribution_pb2.Distribution, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_service.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_service.py new file mode 100644 index 000000000000..996851f3507a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/migration_service.py @@ -0,0 +1,291 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.bigquery_migration_v2alpha.types import migration_entities + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "CreateMigrationWorkflowRequest", + "GetMigrationWorkflowRequest", + "ListMigrationWorkflowsRequest", + "ListMigrationWorkflowsResponse", + "DeleteMigrationWorkflowRequest", + "StartMigrationWorkflowRequest", + "GetMigrationSubtaskRequest", + "ListMigrationSubtasksRequest", + "ListMigrationSubtasksResponse", + }, +) + + +class CreateMigrationWorkflowRequest(proto.Message): + r"""Request to create a migration workflow resource. + + Attributes: + parent (str): + Required. The name of the project to which this migration + workflow belongs. Example: ``projects/foo/locations/bar`` + migration_workflow (google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow): + Required. The migration workflow to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + migration_workflow: migration_entities.MigrationWorkflow = proto.Field( + proto.MESSAGE, + number=2, + message=migration_entities.MigrationWorkflow, + ) + + +class GetMigrationWorkflowRequest(proto.Message): + r"""A request to get a previously created migration workflow. + + Attributes: + name (str): + Required. The unique identifier for the migration workflow. 
+ Example: ``projects/123/locations/us/workflows/1234`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be retrieved. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListMigrationWorkflowsRequest(proto.Message): + r"""A request to list previously created migration workflows. + + Attributes: + parent (str): + Required. The project and location of the migration + workflows to list. Example: ``projects/123/locations/us`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to be retrieved. + page_size (int): + The maximum number of migration workflows to + return. The service may return fewer than this + number. + page_token (str): + A page token, received from previous + ``ListMigrationWorkflows`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListMigrationWorkflows`` must match the call that provided + the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListMigrationWorkflowsResponse(proto.Message): + r"""Response object for a ``ListMigrationWorkflows`` call. + + Attributes: + migration_workflows (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow]): + The migration workflows for the specified + project / location. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + migration_workflows: MutableSequence[ + migration_entities.MigrationWorkflow + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=migration_entities.MigrationWorkflow, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteMigrationWorkflowRequest(proto.Message): + r"""A request to delete a previously created migration workflow. + + Attributes: + name (str): + Required. The unique identifier for the migration workflow. + Example: ``projects/123/locations/us/workflows/1234`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StartMigrationWorkflowRequest(proto.Message): + r"""A request to start a previously created migration workflow. + + Attributes: + name (str): + Required. The unique identifier for the migration workflow. + Example: ``projects/123/locations/us/workflows/1234`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetMigrationSubtaskRequest(proto.Message): + r"""A request to get a previously created migration subtasks. + + Attributes: + name (str): + Required. The unique identifier for the migration subtask. + Example: + ``projects/123/locations/us/workflows/1234/subtasks/543`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be retrieved. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListMigrationSubtasksRequest(proto.Message): + r"""A request to list previously created migration subtasks. + + Attributes: + parent (str): + Required. The migration task of the subtasks to list. + Example: ``projects/123/locations/us/workflows/1234`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be retrieved. + page_size (int): + Optional. 
The maximum number of migration + tasks to return. The service may return fewer + than this number. + page_token (str): + Optional. A page token, received from previous + ``ListMigrationSubtasks`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListMigrationSubtasks`` must match the call that provided + the page token. + filter (str): + Optional. The filter to apply. This can be used to get the + subtasks of a specific tasks in a workflow, e.g. + ``migration_task = "ab012"`` where ``"ab012"`` is the task + ID (not the name in the named map). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMigrationSubtasksResponse(proto.Message): + r"""Response object for a ``ListMigrationSubtasks`` call. + + Attributes: + migration_subtasks (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask]): + The migration subtasks for the specified + task. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. 
+ """ + + @property + def raw_page(self): + return self + + migration_subtasks: MutableSequence[ + migration_entities.MigrationSubtask + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=migration_entities.MigrationSubtask, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/translation_service.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/translation_service.py new file mode 100644 index 000000000000..5fb880f74aa8 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/translation_service.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "TranslateQueryRequest", + "TranslateQueryResponse", + "SqlTranslationErrorDetail", + "SqlTranslationError", + "SqlTranslationWarning", + }, +) + + +class TranslateQueryRequest(proto.Message): + r"""The request of translating a SQL query to Standard SQL. + + Attributes: + parent (str): + Required. 
The name of the project to which this translation + request belongs. Example: ``projects/foo/locations/bar`` + source_dialect (google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect): + Required. The source SQL dialect of ``queries``. + query (str): + Required. The query to be translated. + """ + + class SqlTranslationSourceDialect(proto.Enum): + r"""Supported SQL translation source dialects. + + Values: + SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED (0): + SqlTranslationSourceDialect not specified. + TERADATA (1): + Teradata SQL. + """ + SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED = 0 + TERADATA = 1 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + source_dialect: SqlTranslationSourceDialect = proto.Field( + proto.ENUM, + number=2, + enum=SqlTranslationSourceDialect, + ) + query: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TranslateQueryResponse(proto.Message): + r"""The response of translating a SQL query to Standard SQL. + + Attributes: + translation_job (str): + Output only. Immutable. The unique identifier for the SQL + translation job. Example: + ``projects/123/locations/us/translation/1234`` + translated_query (str): + The translated result. This will be empty if + the translation fails. + errors (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.SqlTranslationError]): + The list of errors encountered during the + translation, if present. + warnings (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.SqlTranslationWarning]): + The list of warnings encountered during the + translation, if present, indicates + non-semantically correct translation. 
+ """ + + translation_job: str = proto.Field( + proto.STRING, + number=4, + ) + translated_query: str = proto.Field( + proto.STRING, + number=1, + ) + errors: MutableSequence["SqlTranslationError"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="SqlTranslationError", + ) + warnings: MutableSequence["SqlTranslationWarning"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="SqlTranslationWarning", + ) + + +class SqlTranslationErrorDetail(proto.Message): + r"""Structured error object capturing the error message and the + location in the source text where the error occurs. + + Attributes: + row (int): + Specifies the row from the source text where + the error occurred. + column (int): + Specifie the column from the source texts + where the error occurred. + message (str): + A human-readable description of the error. + """ + + row: int = proto.Field( + proto.INT64, + number=1, + ) + column: int = proto.Field( + proto.INT64, + number=2, + ) + message: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SqlTranslationError(proto.Message): + r"""The detailed error object if the SQL translation job fails. + + Attributes: + error_type (google.cloud.bigquery_migration_v2alpha.types.SqlTranslationError.SqlTranslationErrorType): + The type of SQL translation error. + error_detail (google.cloud.bigquery_migration_v2alpha.types.SqlTranslationErrorDetail): + Specifies the details of the error, including + the error message and location from the source + text. + """ + + class SqlTranslationErrorType(proto.Enum): + r"""The error type of the SQL translation job. + + Values: + SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED (0): + SqlTranslationErrorType not specified. + SQL_PARSE_ERROR (1): + Failed to parse the input text as a SQL + query. + UNSUPPORTED_SQL_FUNCTION (2): + Found unsupported functions in the input SQL + query that are not able to translate. 
+ """ + SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED = 0 + SQL_PARSE_ERROR = 1 + UNSUPPORTED_SQL_FUNCTION = 2 + + error_type: SqlTranslationErrorType = proto.Field( + proto.ENUM, + number=1, + enum=SqlTranslationErrorType, + ) + error_detail: "SqlTranslationErrorDetail" = proto.Field( + proto.MESSAGE, + number=2, + message="SqlTranslationErrorDetail", + ) + + +class SqlTranslationWarning(proto.Message): + r"""The detailed warning object if the SQL translation job is + completed but not semantically correct. + + Attributes: + warning_detail (google.cloud.bigquery_migration_v2alpha.types.SqlTranslationErrorDetail): + Specifies the details of the warning, + including the warning message and location from + the source text. + """ + + warning_detail: "SqlTranslationErrorDetail" = proto.Field( + proto.MESSAGE, + number=1, + message="SqlTranslationErrorDetail", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/translation_task.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/translation_task.py new file mode 100644 index 000000000000..078059b79b9a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/types/translation_task.py @@ -0,0 +1,368 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "TranslationFileMapping", + "TranslationTaskDetails", + "Filter", + "IdentifierSettings", + "TeradataOptions", + "BteqOptions", + "DatasetReference", + }, +) + + +class TranslationFileMapping(proto.Message): + r"""Mapping between an input and output file to be translated in + a subtask. + + Attributes: + input_path (str): + The Cloud Storage path for a file to + translation in a subtask. + output_path (str): + The Cloud Storage path to write back the + corresponding input file to. + """ + + input_path: str = proto.Field( + proto.STRING, + number=1, + ) + output_path: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TranslationTaskDetails(proto.Message): + r"""The translation task config to capture necessary settings for + a translation task and subtask. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + teradata_options (google.cloud.bigquery_migration_v2alpha.types.TeradataOptions): + The Teradata SQL specific settings for the + translation task. + + This field is a member of `oneof`_ ``language_options``. + bteq_options (google.cloud.bigquery_migration_v2alpha.types.BteqOptions): + The BTEQ specific settings for the + translation task. + + This field is a member of `oneof`_ ``language_options``. + input_path (str): + The Cloud Storage path for translation input + files. + output_path (str): + The Cloud Storage path for translation output + files. 
+ file_paths (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.TranslationFileMapping]): + Cloud Storage files to be processed for + translation. + schema_path (str): + The Cloud Storage path to DDL files as table + schema to assist semantic translation. + file_encoding (google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails.FileEncoding): + The file encoding type. + identifier_settings (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings): + The settings for SQL identifiers. + special_token_map (MutableMapping[str, google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails.TokenType]): + The map capturing special tokens to be + replaced during translation. The key is special + token in string. The value is the token data + type. This is used to translate SQL query + template which contains special token as place + holder. The special token makes a query invalid + to parse. This map will be applied to annotate + those special token with types to let parser + understand how to parse them into proper + structure with type information. + filter (google.cloud.bigquery_migration_v2alpha.types.Filter): + The filter applied to translation details. + translation_exception_table (str): + Specifies the exact name of the bigquery + table ("dataset.table") to be used for surfacing + raw translation errors. If the table does not + exist, we will create it. If it already exists + and the schema is the same, we will re-use. If + the table exists and the schema is different, we + will throw an error. + """ + + class FileEncoding(proto.Enum): + r"""The file encoding types. + + Values: + FILE_ENCODING_UNSPECIFIED (0): + File encoding setting is not specified. + UTF_8 (1): + File encoding is UTF_8. + ISO_8859_1 (2): + File encoding is ISO_8859_1. + US_ASCII (3): + File encoding is US_ASCII. + UTF_16 (4): + File encoding is UTF_16. + UTF_16LE (5): + File encoding is UTF_16LE. + UTF_16BE (6): + File encoding is UTF_16BE. 
+ """ + FILE_ENCODING_UNSPECIFIED = 0 + UTF_8 = 1 + ISO_8859_1 = 2 + US_ASCII = 3 + UTF_16 = 4 + UTF_16LE = 5 + UTF_16BE = 6 + + class TokenType(proto.Enum): + r"""The special token data type. + + Values: + TOKEN_TYPE_UNSPECIFIED (0): + Token type is not specified. + STRING (1): + Token type as string. + INT64 (2): + Token type as integer. + NUMERIC (3): + Token type as numeric. + BOOL (4): + Token type as boolean. + FLOAT64 (5): + Token type as float. + DATE (6): + Token type as date. + TIMESTAMP (7): + Token type as timestamp. + """ + TOKEN_TYPE_UNSPECIFIED = 0 + STRING = 1 + INT64 = 2 + NUMERIC = 3 + BOOL = 4 + FLOAT64 = 5 + DATE = 6 + TIMESTAMP = 7 + + teradata_options: "TeradataOptions" = proto.Field( + proto.MESSAGE, + number=10, + oneof="language_options", + message="TeradataOptions", + ) + bteq_options: "BteqOptions" = proto.Field( + proto.MESSAGE, + number=11, + oneof="language_options", + message="BteqOptions", + ) + input_path: str = proto.Field( + proto.STRING, + number=1, + ) + output_path: str = proto.Field( + proto.STRING, + number=2, + ) + file_paths: MutableSequence["TranslationFileMapping"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="TranslationFileMapping", + ) + schema_path: str = proto.Field( + proto.STRING, + number=3, + ) + file_encoding: FileEncoding = proto.Field( + proto.ENUM, + number=4, + enum=FileEncoding, + ) + identifier_settings: "IdentifierSettings" = proto.Field( + proto.MESSAGE, + number=5, + message="IdentifierSettings", + ) + special_token_map: MutableMapping[str, TokenType] = proto.MapField( + proto.STRING, + proto.ENUM, + number=6, + enum=TokenType, + ) + filter: "Filter" = proto.Field( + proto.MESSAGE, + number=7, + message="Filter", + ) + translation_exception_table: str = proto.Field( + proto.STRING, + number=13, + ) + + +class Filter(proto.Message): + r"""The filter applied to fields of translation details. 
+ + Attributes: + input_file_exclusion_prefixes (MutableSequence[str]): + The list of prefixes used to exclude + processing for input files. + """ + + input_file_exclusion_prefixes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class IdentifierSettings(proto.Message): + r"""Settings related to SQL identifiers. + + Attributes: + output_identifier_case (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings.IdentifierCase): + The setting to control output queries' + identifier case. + identifier_rewrite_mode (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings.IdentifierRewriteMode): + Specifies the rewrite mode for SQL + identifiers. + """ + + class IdentifierCase(proto.Enum): + r"""The identifier case type. + + Values: + IDENTIFIER_CASE_UNSPECIFIED (0): + The identifier case is not specified. + ORIGINAL (1): + Identifiers' cases will be kept as the + original cases. + UPPER (2): + Identifiers will be in upper cases. + LOWER (3): + Identifiers will be in lower cases. + """ + IDENTIFIER_CASE_UNSPECIFIED = 0 + ORIGINAL = 1 + UPPER = 2 + LOWER = 3 + + class IdentifierRewriteMode(proto.Enum): + r"""The SQL identifier rewrite mode. + + Values: + IDENTIFIER_REWRITE_MODE_UNSPECIFIED (0): + SQL Identifier rewrite mode is unspecified. + NONE (1): + SQL identifiers won't be rewrite. + REWRITE_ALL (2): + All SQL identifiers will be rewrite. + """ + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0 + NONE = 1 + REWRITE_ALL = 2 + + output_identifier_case: IdentifierCase = proto.Field( + proto.ENUM, + number=1, + enum=IdentifierCase, + ) + identifier_rewrite_mode: IdentifierRewriteMode = proto.Field( + proto.ENUM, + number=2, + enum=IdentifierRewriteMode, + ) + + +class TeradataOptions(proto.Message): + r"""Teradata SQL specific translation task related settings.""" + + +class BteqOptions(proto.Message): + r"""BTEQ translation task related settings. 
+ + Attributes: + project_dataset (google.cloud.bigquery_migration_v2alpha.types.DatasetReference): + Specifies the project and dataset in BigQuery + that will be used for external table creation + during the translation. + default_path_uri (str): + The Cloud Storage location to be used as the + default path for files that are not otherwise + specified in the file replacement map. + file_replacement_map (MutableMapping[str, str]): + Maps the local paths that are used in BTEQ + scripts (the keys) to the paths in Cloud Storage + that should be used in their stead in the + translation (the value). + """ + + project_dataset: "DatasetReference" = proto.Field( + proto.MESSAGE, + number=1, + message="DatasetReference", + ) + default_path_uri: str = proto.Field( + proto.STRING, + number=2, + ) + file_replacement_map: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class DatasetReference(proto.Message): + r"""Reference to a BigQuery dataset. + + Attributes: + dataset_id (str): + A unique ID for this dataset, without the project name. The + ID must contain only letters (a-z, A-Z), numbers (0-9), or + underscores (_). The maximum length is 1,024 characters. + project_id (str): + The ID of the project containing this + dataset. 
+ """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-migration/mypy.ini b/packages/google-cloud-bigquery-migration/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-bigquery-migration/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-bigquery-migration/noxfile.py b/packages/google-cloud-bigquery-migration/noxfile.py new file mode 100644 index 000000000000..9a2acd8b6787 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + 
session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. 
+ if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-bigquery-migration/renovate.json b/packages/google-cloud-bigquery-migration/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py new file mode 100644 index 000000000000..f10f2a81fad9 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py new file mode 100644 index 000000000000..4bba59355162 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py new file mode 100644 index 000000000000..a859d0fa9867 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.delete_migration_workflow(request=request) + + +# [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py new file mode 100644 index 000000000000..22d612babd58 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + + +# [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py new file mode 100644 index 000000000000..0f28503f9783 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationSubtask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_subtask(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py new file mode 100644 index 000000000000..7e1bed257360 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetMigrationSubtask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py new file mode 100644 index 000000000000..f36496b170d3 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py new file mode 100644 index 000000000000..c3b3be90a7ad --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py new file mode 100644 index 000000000000..896e0c346834 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationSubtasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py new file mode 100644 index 000000000000..2535708a5c8a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListMigrationSubtasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py new file mode 100644 index 000000000000..d150e8a31e5d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationWorkflows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py new file mode 100644 index 000000000000..ced3c83fd462 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListMigrationWorkflows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py new file mode 100644 index 000000000000..b2c6aecd4509 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +async def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.start_migration_workflow(request=request) + + +# [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py new file mode 100644 index 000000000000..191a0ab5fe23 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2 + + +def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + + +# [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py new file mode 100644 index 000000000000..39d3a28e2e78 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py new file mode 100644 index 000000000000..555d4fbc2ade --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py new file mode 100644 index 000000000000..89cc01b1e861 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.delete_migration_workflow(request=request) + + +# [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py new file mode 100644 index 000000000000..c4b5257784be --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + + +# [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py new file mode 100644 index 000000000000..543a752b9230 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationSubtask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_subtask(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py new file mode 100644 index 000000000000..1304101a5197 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetMigrationSubtask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py new file mode 100644 index 000000000000..4404f253916c --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py new file mode 100644 index 000000000000..12b726d8cf5d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py new file mode 100644 index 000000000000..2a32af8e2843 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationSubtasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py new file mode 100644 index 000000000000..a62d56ee7e80 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListMigrationSubtasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py new file mode 100644 index 000000000000..8172329b001b --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google 
LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationWorkflows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py new file mode 100644 index 000000000000..8267a7902967 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListMigrationWorkflows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py new file mode 100644 index 000000000000..4b26c27c7d83 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.start_migration_workflow(request=request) + + +# [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py new file mode 100644 index 000000000000..0b5146073c37 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for StartMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + + +# [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py new file mode 100644 index 000000000000..571ee3dd5e26 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TranslateQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +async def sample_translate_query(): + # Create a client + client = bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.TranslateQueryRequest( + parent="parent_value", + source_dialect="TERADATA", + query="query_value", + ) + + # Make the request + response = await client.translate_query(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_async] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py new file mode 100644 index 000000000000..76546f44b29d --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for TranslateQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_migration_v2alpha + + +def sample_translate_query(): + # Create a client + client = bigquery_migration_v2alpha.SqlTranslationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.TranslateQueryRequest( + parent="parent_value", + source_dialect="TERADATA", + query="query_value", + ) + + # Make the request + response = client.translate_query(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_sync] diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json new file mode 100644 index 000000000000..c336c8549e42 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json @@ -0,0 +1,1138 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": 
"google-cloud-bigquery-migration", + "version": "0.11.2" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.create_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "CreateMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_workflow", + "type": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", + "shortName": "create_migration_workflow" + }, + "description": "Sample for CreateMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.create_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "CreateMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_workflow", + "type": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", + "shortName": "create_migration_workflow" + }, + "description": "Sample for CreateMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.delete_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "DeleteMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_migration_workflow" + }, + "description": "Sample for DeleteMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.delete_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "DeleteMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_migration_workflow" + }, + "description": "Sample for DeleteMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.get_migration_subtask", + "method": { + 
"fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationSubtask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.types.MigrationSubtask", + "shortName": "get_migration_subtask" + }, + "description": "Sample for GetMigrationSubtask", + "file": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.get_migration_subtask", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + 
"shortName": "MigrationService" + }, + "shortName": "GetMigrationSubtask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.types.MigrationSubtask", + "shortName": "get_migration_subtask" + }, + "description": "Sample for GetMigrationSubtask", + "file": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.get_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", + "shortName": "get_migration_workflow" + }, + "description": "Sample for GetMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.get_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", + "shortName": "get_migration_workflow" + }, + "description": "Sample for GetMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.list_migration_subtasks", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationSubtasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksAsyncPager", + "shortName": "list_migration_subtasks" + }, + "description": "Sample for ListMigrationSubtasks", + "file": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.list_migration_subtasks", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationSubtasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksPager", + "shortName": "list_migration_subtasks" + }, + "description": "Sample for ListMigrationSubtasks", + "file": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.list_migration_workflows", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationWorkflows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager", + "shortName": "list_migration_workflows" + }, + "description": "Sample for ListMigrationWorkflows", + "file": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.list_migration_workflows", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationWorkflows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsPager", + 
"shortName": "list_migration_workflows" + }, + "description": "Sample for ListMigrationWorkflows", + "file": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.start_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "StartMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "start_migration_workflow" + }, + "description": "Sample for StartMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.start_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "StartMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "start_migration_workflow" + }, + "description": "Sample for StartMigrationWorkflow", + "file": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py" + } + ] +} diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 000000000000..2ac10c08fa7a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,1315 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-migration", + "version": "0.11.2" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.create_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "CreateMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_workflow", + "type": 
"google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", + "shortName": "create_migration_workflow" + }, + "description": "Sample for CreateMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.create_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "CreateMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"migration_workflow", + "type": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", + "shortName": "create_migration_workflow" + }, + "description": "Sample for CreateMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.delete_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "DeleteMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_migration_workflow" + }, + "description": "Sample for DeleteMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.delete_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "DeleteMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "shortName": "delete_migration_workflow" + }, + "description": "Sample for DeleteMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.get_migration_subtask", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationSubtask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask", + "shortName": "get_migration_subtask" + }, + "description": "Sample for GetMigrationSubtask", + 
"file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.get_migration_subtask", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationSubtask" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask", + "shortName": "get_migration_subtask" + }, + "description": "Sample for GetMigrationSubtask", + "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.get_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", + "shortName": "get_migration_workflow" + }, + "description": "Sample for GetMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.get_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "GetMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", + "shortName": "get_migration_workflow" + }, + "description": "Sample for GetMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_sync", + "segments": [ + { + "end": 51, + "start": 
27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.list_migration_subtasks", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationSubtasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksAsyncPager", + "shortName": "list_migration_subtasks" + }, + "description": "Sample for ListMigrationSubtasks", + "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, 
+ "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.list_migration_subtasks", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationSubtasks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksPager", + "shortName": "list_migration_subtasks" + }, + "description": "Sample for ListMigrationSubtasks", + "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.list_migration_workflows", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationWorkflows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager", + "shortName": "list_migration_workflows" + }, + "description": "Sample for ListMigrationWorkflows", + "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { 
+ "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.list_migration_workflows", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "ListMigrationWorkflows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsPager", + "shortName": "list_migration_workflows" + }, + "description": "Sample for ListMigrationWorkflows", + "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", + "shortName": "MigrationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.start_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "StartMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "start_migration_workflow" + }, + "description": "Sample for StartMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", + "shortName": "MigrationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.start_migration_workflow", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", + "shortName": "MigrationService" + }, + "shortName": "StartMigrationWorkflow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "start_migration_workflow" + }, + "description": "Sample for StartMigrationWorkflow", + "file": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient", + "shortName": "SqlTranslationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient.translate_query", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService.TranslateQuery", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", + "shortName": "SqlTranslationService" + }, + "shortName": "TranslateQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "source_dialect", + "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse", + "shortName": "translate_query" + }, + "description": "Sample for TranslateQuery", + "file": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py" + }, 
+ { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceClient", + "shortName": "SqlTranslationServiceClient" + }, + "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceClient.translate_query", + "method": { + "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService.TranslateQuery", + "service": { + "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", + "shortName": "SqlTranslationService" + }, + "shortName": "TranslateQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "source_dialect", + "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse", + "shortName": "translate_query" + }, + "description": "Sample for TranslateQuery", + "file": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py" + } + ] +} diff --git a/packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-bigquery-migration/scripts/fixup_bigquery_migration_v2_keywords.py b/packages/google-cloud-bigquery-migration/scripts/fixup_bigquery_migration_v2_keywords.py new file mode 100644 index 000000000000..9e82d3b28756 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/fixup_bigquery_migration_v2_keywords.py @@ -0,0 +1,182 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class bigquery_migrationCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_migration_workflow': ('parent', 'migration_workflow', ), + 'delete_migration_workflow': ('name', ), + 'get_migration_subtask': ('name', 'read_mask', ), + 'get_migration_workflow': ('name', 'read_mask', ), + 'list_migration_subtasks': ('parent', 'read_mask', 'page_size', 'page_token', 'filter', ), + 'list_migration_workflows': ('parent', 'read_mask', 'page_size', 'page_token', ), + 'start_migration_workflow': ('name', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=bigquery_migrationCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bigquery_migration client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-migration/scripts/fixup_bigquery_migration_v2alpha_keywords.py b/packages/google-cloud-bigquery-migration/scripts/fixup_bigquery_migration_v2alpha_keywords.py new file mode 100644 index 000000000000..58540ac7c31e --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/fixup_bigquery_migration_v2alpha_keywords.py @@ -0,0 +1,183 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class bigquery_migrationCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_migration_workflow': ('parent', 'migration_workflow', ), + 'delete_migration_workflow': ('name', ), + 'get_migration_subtask': ('name', 'read_mask', ), + 'get_migration_workflow': ('name', 'read_mask', ), + 'list_migration_subtasks': ('parent', 'read_mask', 'page_size', 'page_token', 'filter', ), + 'list_migration_workflows': ('parent', 'read_mask', 'page_size', 'page_token', ), + 'start_migration_workflow': ('name', ), + 'translate_query': ('parent', 'source_dialect', 'query', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or 
too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=bigquery_migrationCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bigquery_migration client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py 
b/packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. 
_Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. 
code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. 
_Homebrew: http://brew.sh diff --git a/packages/google-cloud-bigquery-migration/setup.cfg b/packages/google-cloud-bigquery-migration/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-bigquery-migration/setup.py b/packages/google-cloud-bigquery-migration/setup.py new file mode 100644 index 000000000000..21480fcf36cf --- /dev/null +++ b/packages/google-cloud-bigquery-migration/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-bigquery-migration" + + +description = "Google Cloud Bigquery Migration API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/bigquery_migration/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 
3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-bigquery-migration/testing/.gitignore b/packages/google-cloud-bigquery-migration/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/testing/constraints-3.10.txt b/packages/google-cloud-bigquery-migration/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-migration/testing/constraints-3.11.txt b/packages/google-cloud-bigquery-migration/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-migration/testing/constraints-3.12.txt b/packages/google-cloud-bigquery-migration/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-migration/testing/constraints-3.7.txt b/packages/google-cloud-bigquery-migration/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-bigquery-migration/testing/constraints-3.8.txt b/packages/google-cloud-bigquery-migration/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-migration/testing/constraints-3.9.txt b/packages/google-cloud-bigquery-migration/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-migration/tests/__init__.py b/packages/google-cloud-bigquery-migration/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-migration/tests/unit/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py new file mode 100644 index 000000000000..11760baf07cb --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py @@ -0,0 +1,3601 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.bigquery_migration_v2.services.migration_service import ( + MigrationServiceAsyncClient, + MigrationServiceClient, + pagers, + transports, +) +from google.cloud.bigquery_migration_v2.types import ( + migration_entities, + migration_error_details, + migration_metrics, + migration_service, + translation_config, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MigrationServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MigrationServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MigrationServiceClient, "grpc"), + (MigrationServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_migration_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MigrationServiceGrpcTransport, "grpc"), + (transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def 
test_migration_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MigrationServiceClient, "grpc"), + (MigrationServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_migration_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +def test_migration_service_client_get_transport_class(): + transport = MigrationServiceClient.get_transport_class() + available_transports = [ + transports.MigrationServiceGrpcTransport, + ] + assert transport in available_transports + + transport = MigrationServiceClient.get_transport_class("grpc") + assert transport == transports.MigrationServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + MigrationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +def test_migration_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MigrationServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MigrationServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + "true", + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + "false", + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + MigrationServiceClient, + 
"DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_migration_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [MigrationServiceClient, MigrationServiceAsyncClient] +) +@mock.patch.object( + MigrationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +def test_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_migration_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_migration_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is 
provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_migration_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MigrationServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_migration_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.CreateMigrationWorkflowRequest, + dict, + ], +) +def test_create_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so 
just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + response = client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.CreateMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +def test_create_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + client.create_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.CreateMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_create_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.CreateMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + ) + response = await client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.CreateMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +@pytest.mark.asyncio +async def test_create_migration_workflow_async_from_dict(): + await test_create_migration_workflow_async(request_type=dict) + + +def test_create_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.CreateMigrationWorkflowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + call.return_value = migration_entities.MigrationWorkflow() + client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.CreateMigrationWorkflowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + await client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_migration_workflow( + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].migration_workflow + mock_val = migration_entities.MigrationWorkflow(name="name_value") + assert arg == mock_val + + +def test_create_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_migration_workflow( + migration_service.CreateMigrationWorkflowRequest(), + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_migration_workflow( + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].migration_workflow + mock_val = migration_entities.MigrationWorkflow(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_migration_workflow( + migration_service.CreateMigrationWorkflowRequest(), + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.GetMigrationWorkflowRequest, + dict, + ], +) +def test_get_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + response = client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +def test_get_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + client.get_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_get_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.GetMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + ) + response = await client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +@pytest.mark.asyncio +async def test_get_migration_workflow_async_from_dict(): + await test_get_migration_workflow_async(request_type=dict) + + +def test_get_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + call.return_value = migration_entities.MigrationWorkflow() + client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + await client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_migration_workflow( + migration_service.GetMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_migration_workflow( + migration_service.GetMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.ListMigrationWorkflowsRequest, + dict, + ], +) +def test_list_migration_workflows(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationWorkflowsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationWorkflowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationWorkflowsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_migration_workflows_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + client.list_migration_workflows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationWorkflowsRequest() + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async( + transport: str = "grpc_asyncio", + request_type=migration_service.ListMigrationWorkflowsRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationWorkflowsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationWorkflowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationWorkflowsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async_from_dict(): + await test_list_migration_workflows_async(request_type=dict) + + +def test_list_migration_workflows_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = migration_service.ListMigrationWorkflowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + call.return_value = migration_service.ListMigrationWorkflowsResponse() + client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_migration_workflows_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.ListMigrationWorkflowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationWorkflowsResponse() + ) + await client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_migration_workflows_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationWorkflowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_migration_workflows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_migration_workflows_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_migration_workflows( + migration_service.ListMigrationWorkflowsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_migration_workflows_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationWorkflowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationWorkflowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_migration_workflows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_migration_workflows_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_migration_workflows( + migration_service.ListMigrationWorkflowsRequest(), + parent="parent_value", + ) + + +def test_list_migration_workflows_pager(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_migration_workflows(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
migration_entities.MigrationWorkflow) for i in results) + + +def test_list_migration_workflows_pages(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + pages = list(client.list_migration_workflows(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async_pager(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_migration_workflows( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, migration_entities.MigrationWorkflow) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async_pages(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_migration_workflows(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.DeleteMigrationWorkflowRequest, + dict, + ], +) +def test_delete_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.DeleteMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + client.delete_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.DeleteMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.DeleteMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.DeleteMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_async_from_dict(): + await test_delete_migration_workflow_async(request_type=dict) + + +def test_delete_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.DeleteMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + call.return_value = None + client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.DeleteMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_migration_workflow( + migration_service.DeleteMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_migration_workflow( + migration_service.DeleteMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.StartMigrationWorkflowRequest, + dict, + ], +) +def test_start_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.StartMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_start_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + client.start_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.StartMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_start_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.StartMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.StartMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_start_migration_workflow_async_from_dict(): + await test_start_migration_workflow_async(request_type=dict) + + +def test_start_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.StartMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + call.return_value = None + client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.StartMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_start_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.start_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_start_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_migration_workflow( + migration_service.StartMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_start_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.start_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_start_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.start_migration_workflow( + migration_service.StartMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.GetMigrationSubtaskRequest, + dict, + ], +) +def test_get_migration_subtask(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = migration_entities.MigrationSubtask( + name="name_value", + task_id="task_id_value", + type_="type__value", + state=migration_entities.MigrationSubtask.State.ACTIVE, + resource_error_count=2169, + ) + response = client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationSubtaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationSubtask) + assert response.name == "name_value" + assert response.task_id == "task_id_value" + assert response.type_ == "type__value" + assert response.state == migration_entities.MigrationSubtask.State.ACTIVE + assert response.resource_error_count == 2169 + + +def test_get_migration_subtask_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + client.get_migration_subtask() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationSubtaskRequest() + + +@pytest.mark.asyncio +async def test_get_migration_subtask_async( + transport: str = "grpc_asyncio", + request_type=migration_service.GetMigrationSubtaskRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationSubtask( + name="name_value", + task_id="task_id_value", + type_="type__value", + state=migration_entities.MigrationSubtask.State.ACTIVE, + resource_error_count=2169, + ) + ) + response = await client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationSubtaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationSubtask) + assert response.name == "name_value" + assert response.task_id == "task_id_value" + assert response.type_ == "type__value" + assert response.state == migration_entities.MigrationSubtask.State.ACTIVE + assert response.resource_error_count == 2169 + + +@pytest.mark.asyncio +async def test_get_migration_subtask_async_from_dict(): + await test_get_migration_subtask_async(request_type=dict) + + +def test_get_migration_subtask_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationSubtaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + call.return_value = migration_entities.MigrationSubtask() + client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_migration_subtask_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationSubtaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationSubtask() + ) + await client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_migration_subtask_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationSubtask() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_migration_subtask( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_migration_subtask_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_migration_subtask( + migration_service.GetMigrationSubtaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_migration_subtask_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationSubtask() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationSubtask() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_migration_subtask( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_migration_subtask_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_migration_subtask( + migration_service.GetMigrationSubtaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.ListMigrationSubtasksRequest, + dict, + ], +) +def test_list_migration_subtasks(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationSubtasksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationSubtasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationSubtasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_migration_subtasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + client.list_migration_subtasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationSubtasksRequest() + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async( + transport: str = "grpc_asyncio", + request_type=migration_service.ListMigrationSubtasksRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationSubtasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationSubtasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationSubtasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async_from_dict(): + await test_list_migration_subtasks_async(request_type=dict) + + +def test_list_migration_subtasks_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = migration_service.ListMigrationSubtasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + call.return_value = migration_service.ListMigrationSubtasksResponse() + client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.ListMigrationSubtasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationSubtasksResponse() + ) + await client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_migration_subtasks_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationSubtasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_migration_subtasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_migration_subtasks_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_migration_subtasks( + migration_service.ListMigrationSubtasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationSubtasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationSubtasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_migration_subtasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_migration_subtasks( + migration_service.ListMigrationSubtasksRequest(), + parent="parent_value", + ) + + +def test_list_migration_subtasks_pager(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_migration_subtasks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
migration_entities.MigrationSubtask) for i in results) + + +def test_list_migration_subtasks_pages(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_migration_subtasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async_pager(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_migration_subtasks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, migration_entities.MigrationSubtask) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async_pages(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_migration_subtasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MigrationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MigrationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = MigrationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MigrationServiceGrpcTransport, + ) + + +def test_migration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_migration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_migration_workflow", + "get_migration_workflow", + "list_migration_workflows", + "delete_migration_workflow", + "start_migration_workflow", + "get_migration_subtask", + "list_migration_subtasks", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_migration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MigrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_migration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MigrationServiceTransport() + adc.assert_called_once() + + +def test_migration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MigrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MigrationServiceGrpcTransport, grpc_helpers), + (transports.MigrationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_migration_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_migration_service_host_no_port(transport_name): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerymigration.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_migration_service_host_with_port(transport_name): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerymigration.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("bigquerymigration.googleapis.com:8000") + + +def test_migration_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MigrationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_migration_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MigrationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_migration_subtask_path(): + project = "squid" + location = "clam" + workflow = "whelk" + subtask = "octopus" + expected = "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format( + project=project, + location=location, + workflow=workflow, + subtask=subtask, + ) + actual = MigrationServiceClient.migration_subtask_path( + project, location, workflow, subtask + ) + assert expected == actual + + +def test_parse_migration_subtask_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "workflow": "cuttlefish", + "subtask": "mussel", + } + path = MigrationServiceClient.migration_subtask_path(**expected) + + # Check that the path 
construction is reversible. + actual = MigrationServiceClient.parse_migration_subtask_path(path) + assert expected == actual + + +def test_migration_workflow_path(): + project = "winkle" + location = "nautilus" + workflow = "scallop" + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) + actual = MigrationServiceClient.migration_workflow_path(project, location, workflow) + assert expected == actual + + +def test_parse_migration_workflow_path(): + expected = { + "project": "abalone", + "location": "squid", + "workflow": "clam", + } + path = MigrationServiceClient.migration_workflow_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_migration_workflow_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MigrationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = MigrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MigrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = MigrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MigrationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MigrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = MigrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = MigrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = MigrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MigrationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = MigrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MigrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MigrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport), + (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py new file mode 100644 index 000000000000..183e254facb7 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -0,0 +1,3603 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import error_details_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.bigquery_migration_v2alpha.services.migration_service import ( + MigrationServiceAsyncClient, + MigrationServiceClient, + pagers, + transports, +) +from google.cloud.bigquery_migration_v2alpha.types import ( + assessment_task, + migration_entities, + migration_error_details, + migration_metrics, + migration_service, + translation_task, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MigrationServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MigrationServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MigrationServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MigrationServiceClient, "grpc"), + (MigrationServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_migration_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MigrationServiceGrpcTransport, "grpc"), + (transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def 
test_migration_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MigrationServiceClient, "grpc"), + (MigrationServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_migration_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +def test_migration_service_client_get_transport_class(): + transport = MigrationServiceClient.get_transport_class() + available_transports = [ + transports.MigrationServiceGrpcTransport, + ] + assert transport in available_transports + + transport = MigrationServiceClient.get_transport_class("grpc") + assert transport == transports.MigrationServiceGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + MigrationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +def test_migration_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MigrationServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MigrationServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + "true", + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + "false", + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + MigrationServiceClient, + 
"DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_migration_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [MigrationServiceClient, MigrationServiceAsyncClient] +) +@mock.patch.object( + MigrationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +def test_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_migration_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_migration_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is 
provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_migration_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MigrationServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_migration_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.CreateMigrationWorkflowRequest, + dict, + ], +) +def test_create_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so 
just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + response = client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.CreateMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +def test_create_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + client.create_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.CreateMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_create_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.CreateMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + ) + response = await client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.CreateMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +@pytest.mark.asyncio +async def test_create_migration_workflow_async_from_dict(): + await test_create_migration_workflow_async(request_type=dict) + + +def test_create_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.CreateMigrationWorkflowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + call.return_value = migration_entities.MigrationWorkflow() + client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.CreateMigrationWorkflowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + await client.create_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_migration_workflow( + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].migration_workflow + mock_val = migration_entities.MigrationWorkflow(name="name_value") + assert arg == mock_val + + +def test_create_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_migration_workflow( + migration_service.CreateMigrationWorkflowRequest(), + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_migration_workflow( + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].migration_workflow + mock_val = migration_entities.MigrationWorkflow(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_migration_workflow( + migration_service.CreateMigrationWorkflowRequest(), + parent="parent_value", + migration_workflow=migration_entities.MigrationWorkflow(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.GetMigrationWorkflowRequest, + dict, + ], +) +def test_get_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + response = client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +def test_get_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + client.get_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_get_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.GetMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow( + name="name_value", + display_name="display_name_value", + state=migration_entities.MigrationWorkflow.State.DRAFT, + ) + ) + response = await client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, migration_entities.MigrationWorkflow) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == migration_entities.MigrationWorkflow.State.DRAFT + + +@pytest.mark.asyncio +async def test_get_migration_workflow_async_from_dict(): + await test_get_migration_workflow_async(request_type=dict) + + +def test_get_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + call.return_value = migration_entities.MigrationWorkflow() + client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + await client.get_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_migration_workflow( + migration_service.GetMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationWorkflow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationWorkflow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_migration_workflow( + migration_service.GetMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.ListMigrationWorkflowsRequest, + dict, + ], +) +def test_list_migration_workflows(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationWorkflowsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationWorkflowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationWorkflowsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_migration_workflows_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + client.list_migration_workflows() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationWorkflowsRequest() + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async( + transport: str = "grpc_asyncio", + request_type=migration_service.ListMigrationWorkflowsRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationWorkflowsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationWorkflowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationWorkflowsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async_from_dict(): + await test_list_migration_workflows_async(request_type=dict) + + +def test_list_migration_workflows_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = migration_service.ListMigrationWorkflowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + call.return_value = migration_service.ListMigrationWorkflowsResponse() + client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_migration_workflows_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.ListMigrationWorkflowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationWorkflowsResponse() + ) + await client.list_migration_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_migration_workflows_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationWorkflowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_migration_workflows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_migration_workflows_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_migration_workflows( + migration_service.ListMigrationWorkflowsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_migration_workflows_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationWorkflowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationWorkflowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_migration_workflows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_migration_workflows_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_migration_workflows( + migration_service.ListMigrationWorkflowsRequest(), + parent="parent_value", + ) + + +def test_list_migration_workflows_pager(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_migration_workflows(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
migration_entities.MigrationWorkflow) for i in results) + + +def test_list_migration_workflows_pages(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + pages = list(client.list_migration_workflows(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async_pager(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_migration_workflows( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, migration_entities.MigrationWorkflow) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_migration_workflows_async_pages(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_workflows), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[], + next_page_token="def", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationWorkflowsResponse( + migration_workflows=[ + migration_entities.MigrationWorkflow(), + migration_entities.MigrationWorkflow(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_migration_workflows(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.DeleteMigrationWorkflowRequest, + dict, + ], +) +def test_delete_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.DeleteMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + client.delete_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.DeleteMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.DeleteMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.DeleteMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_async_from_dict(): + await test_delete_migration_workflow_async(request_type=dict) + + +def test_delete_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.DeleteMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + call.return_value = None + client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.DeleteMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_migration_workflow( + migration_service.DeleteMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_migration_workflow( + migration_service.DeleteMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.StartMigrationWorkflowRequest, + dict, + ], +) +def test_start_migration_workflow(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.StartMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_start_migration_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + client.start_migration_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.StartMigrationWorkflowRequest() + + +@pytest.mark.asyncio +async def test_start_migration_workflow_async( + transport: str = "grpc_asyncio", + request_type=migration_service.StartMigrationWorkflowRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.StartMigrationWorkflowRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_start_migration_workflow_async_from_dict(): + await test_start_migration_workflow_async(request_type=dict) + + +def test_start_migration_workflow_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.StartMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + call.return_value = None + client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_migration_workflow_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.StartMigrationWorkflowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.start_migration_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_start_migration_workflow_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.start_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_start_migration_workflow_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_migration_workflow( + migration_service.StartMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_start_migration_workflow_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.start_migration_workflow), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.start_migration_workflow( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_start_migration_workflow_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.start_migration_workflow( + migration_service.StartMigrationWorkflowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.GetMigrationSubtaskRequest, + dict, + ], +) +def test_get_migration_subtask(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = migration_entities.MigrationSubtask( + name="name_value", + task_id="task_id_value", + type_="type__value", + state=migration_entities.MigrationSubtask.State.ACTIVE, + resource_error_count=2169, + ) + response = client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationSubtaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationSubtask) + assert response.name == "name_value" + assert response.task_id == "task_id_value" + assert response.type_ == "type__value" + assert response.state == migration_entities.MigrationSubtask.State.ACTIVE + assert response.resource_error_count == 2169 + + +def test_get_migration_subtask_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + client.get_migration_subtask() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationSubtaskRequest() + + +@pytest.mark.asyncio +async def test_get_migration_subtask_async( + transport: str = "grpc_asyncio", + request_type=migration_service.GetMigrationSubtaskRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationSubtask( + name="name_value", + task_id="task_id_value", + type_="type__value", + state=migration_entities.MigrationSubtask.State.ACTIVE, + resource_error_count=2169, + ) + ) + response = await client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.GetMigrationSubtaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, migration_entities.MigrationSubtask) + assert response.name == "name_value" + assert response.task_id == "task_id_value" + assert response.type_ == "type__value" + assert response.state == migration_entities.MigrationSubtask.State.ACTIVE + assert response.resource_error_count == 2169 + + +@pytest.mark.asyncio +async def test_get_migration_subtask_async_from_dict(): + await test_get_migration_subtask_async(request_type=dict) + + +def test_get_migration_subtask_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationSubtaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + call.return_value = migration_entities.MigrationSubtask() + client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_migration_subtask_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.GetMigrationSubtaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationSubtask() + ) + await client.get_migration_subtask(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_migration_subtask_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationSubtask() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_migration_subtask( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_migration_subtask_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_migration_subtask( + migration_service.GetMigrationSubtaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_migration_subtask_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_migration_subtask), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_entities.MigrationSubtask() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_entities.MigrationSubtask() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_migration_subtask( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_migration_subtask_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_migration_subtask( + migration_service.GetMigrationSubtaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + migration_service.ListMigrationSubtasksRequest, + dict, + ], +) +def test_list_migration_subtasks(request_type, transport: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationSubtasksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationSubtasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationSubtasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_migration_subtasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + client.list_migration_subtasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationSubtasksRequest() + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async( + transport: str = "grpc_asyncio", + request_type=migration_service.ListMigrationSubtasksRequest, +): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationSubtasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == migration_service.ListMigrationSubtasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMigrationSubtasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async_from_dict(): + await test_list_migration_subtasks_async(request_type=dict) + + +def test_list_migration_subtasks_field_headers(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = migration_service.ListMigrationSubtasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + call.return_value = migration_service.ListMigrationSubtasksResponse() + client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_field_headers_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = migration_service.ListMigrationSubtasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationSubtasksResponse() + ) + await client.list_migration_subtasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_migration_subtasks_flattened(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationSubtasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_migration_subtasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_migration_subtasks_flattened_error(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_migration_subtasks( + migration_service.ListMigrationSubtasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_flattened_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = migration_service.ListMigrationSubtasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + migration_service.ListMigrationSubtasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_migration_subtasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_flattened_error_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_migration_subtasks( + migration_service.ListMigrationSubtasksRequest(), + parent="parent_value", + ) + + +def test_list_migration_subtasks_pager(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_migration_subtasks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
migration_entities.MigrationSubtask) for i in results) + + +def test_list_migration_subtasks_pages(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_migration_subtasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async_pager(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_migration_subtasks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, migration_entities.MigrationSubtask) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_migration_subtasks_async_pages(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_migration_subtasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + next_page_token="abc", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[], + next_page_token="def", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + ], + next_page_token="ghi", + ), + migration_service.ListMigrationSubtasksResponse( + migration_subtasks=[ + migration_entities.MigrationSubtask(), + migration_entities.MigrationSubtask(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_migration_subtasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MigrationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MigrationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = MigrationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MigrationServiceGrpcTransport, + ) + + +def test_migration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_migration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_migration_workflow", + "get_migration_workflow", + "list_migration_workflows", + "delete_migration_workflow", + "start_migration_workflow", + "get_migration_subtask", + "list_migration_subtasks", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_migration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MigrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_migration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MigrationServiceTransport() + adc.assert_called_once() + + +def test_migration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MigrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MigrationServiceGrpcTransport, grpc_helpers), + (transports.MigrationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_migration_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_migration_service_host_no_port(transport_name): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerymigration.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_migration_service_host_with_port(transport_name): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerymigration.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("bigquerymigration.googleapis.com:8000") + + +def test_migration_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MigrationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_migration_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MigrationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MigrationServiceGrpcTransport, + transports.MigrationServiceGrpcAsyncIOTransport, + ], +) +def test_migration_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_migration_subtask_path(): + project = "squid" + location = "clam" + workflow = "whelk" + subtask = "octopus" + expected = "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format( + project=project, + location=location, + workflow=workflow, + subtask=subtask, + ) + actual = MigrationServiceClient.migration_subtask_path( + project, location, workflow, subtask + ) + assert expected == actual + + +def test_parse_migration_subtask_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "workflow": "cuttlefish", + "subtask": "mussel", + } + path = MigrationServiceClient.migration_subtask_path(**expected) + + # Check that the path 
construction is reversible. + actual = MigrationServiceClient.parse_migration_subtask_path(path) + assert expected == actual + + +def test_migration_workflow_path(): + project = "winkle" + location = "nautilus" + workflow = "scallop" + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) + actual = MigrationServiceClient.migration_workflow_path(project, location, workflow) + assert expected == actual + + +def test_parse_migration_workflow_path(): + expected = { + "project": "abalone", + "location": "squid", + "workflow": "clam", + } + path = MigrationServiceClient.migration_workflow_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_migration_workflow_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MigrationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = MigrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MigrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = MigrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MigrationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MigrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = MigrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = MigrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = MigrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MigrationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MigrationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = MigrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MigrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MigrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport), + (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py new file mode 100644 index 000000000000..03cbe2f14bd8 --- /dev/null +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py @@ -0,0 +1,1655 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service import ( + SqlTranslationServiceAsyncClient, + SqlTranslationServiceClient, + transports, +) +from google.cloud.bigquery_migration_v2alpha.types import translation_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SqlTranslationServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SqlTranslationServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SqlTranslationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SqlTranslationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SqlTranslationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SqlTranslationServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SqlTranslationServiceClient, "grpc"), + (SqlTranslationServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_sql_translation_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SqlTranslationServiceGrpcTransport, "grpc"), + 
(transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_sql_translation_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SqlTranslationServiceClient, "grpc"), + (SqlTranslationServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_sql_translation_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +def test_sql_translation_service_client_get_transport_class(): + transport = SqlTranslationServiceClient.get_transport_class() + available_transports = [ + transports.SqlTranslationServiceGrpcTransport, + ] + assert transport in available_transports + + transport = 
SqlTranslationServiceClient.get_transport_class("grpc") + assert transport == transports.SqlTranslationServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SqlTranslationServiceClient, + transports.SqlTranslationServiceGrpcTransport, + "grpc", + ), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + SqlTranslationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SqlTranslationServiceClient), +) +@mock.patch.object( + SqlTranslationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SqlTranslationServiceAsyncClient), +) +def test_sql_translation_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SqlTranslationServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SqlTranslationServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SqlTranslationServiceClient, + transports.SqlTranslationServiceGrpcTransport, + "grpc", + "true", + ), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), 
+ ( + SqlTranslationServiceClient, + transports.SqlTranslationServiceGrpcTransport, + "grpc", + "false", + ), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + SqlTranslationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SqlTranslationServiceClient), +) +@mock.patch.object( + SqlTranslationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SqlTranslationServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_sql_translation_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SqlTranslationServiceClient, SqlTranslationServiceAsyncClient] +) +@mock.patch.object( + SqlTranslationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SqlTranslationServiceClient), +) +@mock.patch.object( + SqlTranslationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SqlTranslationServiceAsyncClient), +) +def test_sql_translation_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SqlTranslationServiceClient, + transports.SqlTranslationServiceGrpcTransport, + "grpc", + ), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_sql_translation_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SqlTranslationServiceClient, + transports.SqlTranslationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_sql_translation_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, 
grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_sql_translation_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SqlTranslationServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SqlTranslationServiceClient, + transports.SqlTranslationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_sql_translation_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.TranslateQueryRequest, + dict, + ], +) +def test_translate_query(request_type, transport: str = "grpc"): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an 
empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = translation_service.TranslateQueryResponse( + translation_job="translation_job_value", + translated_query="translated_query_value", + ) + response = client.translate_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.TranslateQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.TranslateQueryResponse) + assert response.translation_job == "translation_job_value" + assert response.translated_query == "translated_query_value" + + +def test_translate_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + client.translate_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.TranslateQueryRequest() + + +@pytest.mark.asyncio +async def test_translate_query_async( + transport: str = "grpc_asyncio", + request_type=translation_service.TranslateQueryRequest, +): + client = SqlTranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.TranslateQueryResponse( + translation_job="translation_job_value", + translated_query="translated_query_value", + ) + ) + response = await client.translate_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.TranslateQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.TranslateQueryResponse) + assert response.translation_job == "translation_job_value" + assert response.translated_query == "translated_query_value" + + +@pytest.mark.asyncio +async def test_translate_query_async_from_dict(): + await test_translate_query_async(request_type=dict) + + +def test_translate_query_field_headers(): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = translation_service.TranslateQueryRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + call.return_value = translation_service.TranslateQueryResponse() + client.translate_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_translate_query_field_headers_async(): + client = SqlTranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = translation_service.TranslateQueryRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.TranslateQueryResponse() + ) + await client.translate_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_translate_query_flattened(): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = translation_service.TranslateQueryResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.translate_query( + parent="parent_value", + source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, + query="query_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].source_dialect + mock_val = ( + translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA + ) + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + + +def test_translate_query_flattened_error(): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.translate_query( + translation_service.TranslateQueryRequest(), + parent="parent_value", + source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, + query="query_value", + ) + + +@pytest.mark.asyncio +async def test_translate_query_flattened_async(): + client = SqlTranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.translate_query), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = translation_service.TranslateQueryResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.TranslateQueryResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.translate_query( + parent="parent_value", + source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, + query="query_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].source_dialect + mock_val = ( + translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA + ) + assert arg == mock_val + arg = args[0].query + mock_val = "query_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_translate_query_flattened_error_async(): + client = SqlTranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.translate_query( + translation_service.TranslateQueryRequest(), + parent="parent_value", + source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, + query="query_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SqlTranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SqlTranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SqlTranslationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.SqlTranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SqlTranslationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SqlTranslationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SqlTranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SqlTranslationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SqlTranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SqlTranslationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SqlTranslationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SqlTranslationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SqlTranslationServiceGrpcTransport, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = SqlTranslationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SqlTranslationServiceGrpcTransport, + ) + + +def test_sql_translation_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SqlTranslationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_sql_translation_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SqlTranslationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("translate_query",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_sql_translation_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SqlTranslationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_sql_translation_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SqlTranslationServiceTransport() + adc.assert_called_once() + + +def test_sql_translation_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SqlTranslationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SqlTranslationServiceGrpcTransport, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ], +) +def test_sql_translation_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SqlTranslationServiceGrpcTransport, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ], +) +def test_sql_translation_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SqlTranslationServiceGrpcTransport, grpc_helpers), + (transports.SqlTranslationServiceGrpcAsyncIOTransport, 
grpc_helpers_async), + ], +) +def test_sql_translation_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SqlTranslationServiceGrpcTransport, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ], +) +def test_sql_translation_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_sql_translation_service_host_no_port(transport_name): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerymigration.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("bigquerymigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_sql_translation_service_host_with_port(transport_name): + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerymigration.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("bigquerymigration.googleapis.com:8000") + + +def test_sql_translation_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SqlTranslationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_sql_translation_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SqlTranslationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SqlTranslationServiceGrpcTransport, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ], +) +def test_sql_translation_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.SqlTranslationServiceGrpcTransport, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ], +) +def test_sql_translation_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = 
SqlTranslationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SqlTranslationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SqlTranslationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SqlTranslationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SqlTranslationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SqlTranslationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SqlTranslationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SqlTranslationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SqlTranslationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = SqlTranslationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SqlTranslationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SqlTranslationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SqlTranslationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SqlTranslationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SqlTranslationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SqlTranslationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SqlTranslationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SqlTranslationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SqlTranslationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SqlTranslationServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = SqlTranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport), + ( + SqlTranslationServiceAsyncClient, + transports.SqlTranslationServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-billing-budgets/.flake8 b/packages/google-cloud-billing-budgets/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-billing-budgets/.flake8 +++ b/packages/google-cloud-billing-budgets/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# 
Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-billing-budgets/CHANGELOG.md b/packages/google-cloud-billing-budgets/CHANGELOG.md index 3184d6796087..8a18a5785d1f 100644 --- a/packages/google-cloud-billing-budgets/CHANGELOG.md +++ b/packages/google-cloud-billing-budgets/CHANGELOG.md @@ -1,4 +1,20 @@ # Changelog +## [1.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.12.0...google-cloud-billing-budgets-v1.12.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.11.1...google-cloud-billing-budgets-v1.12.0) (2023-08-31) + + +### Features + +* Added `enable_project_level_recipients` for project owner budget emails ([dd953ec](https://github.com/googleapis/google-cloud-python/commit/dd953ec8f3a6a31143a358573737e3f0d2fed9f2)) +* Added `scope` for project scope filter in `ListBudgetsRequest` ([dd953ec](https://github.com/googleapis/google-cloud-python/commit/dd953ec8f3a6a31143a358573737e3f0d2fed9f2)) +* Supported project-level-budgets ([dd953ec](https://github.com/googleapis/google-cloud-python/commit/dd953ec8f3a6a31143a358573737e3f0d2fed9f2)) + ## [1.11.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.11.0...google-cloud-billing-budgets-v1.11.1) (2023-07-05) diff --git a/packages/google-cloud-billing-budgets/CONTRIBUTING.rst b/packages/google-cloud-billing-budgets/CONTRIBUTING.rst index 8f88b597d7a3..4c0e5324e123 100644 --- a/packages/google-cloud-billing-budgets/CONTRIBUTING.rst +++ b/packages/google-cloud-billing-budgets/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our 
``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-billing-budgets/noxfile.py ********** diff --git a/packages/google-cloud-billing-budgets/MANIFEST.in b/packages/google-cloud-billing-budgets/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-billing-budgets/MANIFEST.in +++ b/packages/google-cloud-billing-budgets/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-billing-budgets/README.rst b/packages/google-cloud-billing-budgets/README.rst index cd7b9f8d8b62..0efccc361218 100644 --- a/packages/google-cloud-billing-budgets/README.rst +++ b/packages/google-cloud-billing-budgets/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. 
_`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-billing-budgets + pip install google-cloud-billing-budgets Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-billing-budgets + py -m venv + .\\Scripts\activate + pip install google-cloud-billing-budgets Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-billing-budgets/docs/conf.py b/packages/google-cloud-billing-budgets/docs/conf.py index b16fd456a55f..bdc30a12291f 100644 --- a/packages/google-cloud-billing-budgets/docs/conf.py +++ b/packages/google-cloud-billing-budgets/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py index 1b41817ac065..86625e21ccc2 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py @@ -350,6 +350,7 @@ async def update_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -487,6 +488,7 @@ async def get_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. 
When reading from the API, you will not see these fields in @@ -610,6 +612,7 @@ async def list_budgets( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBudgetsAsyncPager: r"""Returns a list of budgets for a billing account. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -663,6 +666,7 @@ async def sample_list_budgets(): Returns: google.cloud.billing.budgets_v1.services.budget_service.pagers.ListBudgetsAsyncPager: Response for ListBudgets + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py index dd4014b4d07a..acef83de4685 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py @@ -574,6 +574,7 @@ def update_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -701,6 +702,7 @@ def get_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -814,6 +816,7 @@ def list_budgets( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBudgetsPager: r"""Returns a list of budgets for a billing account. 
+ WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -867,6 +870,7 @@ def sample_list_budgets(): Returns: google.cloud.billing.budgets_v1.services.budget_service.pagers.ListBudgetsPager: Response for ListBudgets + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py index 13d2e26e16f3..cb645bf9f6b3 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py @@ -267,6 +267,7 @@ def update_budget( r"""Return a callable for the update budget method over gRPC. Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -297,6 +298,7 @@ def get_budget( r"""Return a callable for the get budget method over gRPC. Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -330,6 +332,7 @@ def list_budgets( r"""Return a callable for the list budgets method over gRPC. Returns a list of budgets for a billing account. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. 
When reading from the API, you will not see these fields in diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py index 60768b52bc66..b3ae7df90e1d 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py @@ -270,6 +270,7 @@ def update_budget( r"""Return a callable for the update budget method over gRPC. Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -300,6 +301,7 @@ def get_budget( r"""Return a callable for the get budget method over gRPC. Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -334,6 +336,7 @@ def list_budgets( r"""Return a callable for the list budgets method over gRPC. Returns a list of budgets for a billing account. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. 
When reading from the API, you will not see these fields in diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_model.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_model.py index e7e3cd7e1a99..d2ef77eecac4 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_model.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_model.py @@ -335,6 +335,14 @@ class NotificationsRule(proto.Message): Default notifications are sent to those with Billing Account Administrator and Billing Account User IAM roles for the target account. + enable_project_level_recipients (bool): + Optional. When set to true, and when the budget has a single + project configured, notifications will be sent to project + level recipients of that project. This field will be ignored + if the budget has multiple or no project configured. + + Currently, project level recipients are the users with + ``Owner`` role on a cloud project. """ pubsub_topic: str = proto.Field( @@ -353,6 +361,10 @@ class NotificationsRule(proto.Message): proto.BOOL, number=4, ) + enable_project_level_recipients: bool = proto.Field( + proto.BOOL, + number=5, + ) class Filter(proto.Message): @@ -378,9 +390,11 @@ class Filter(proto.Message): ``folders/{folderId}`` or ``organizations/{organizationId}``, specifying that usage from only this set of folders and organizations should be - included in the budget. If omitted, the report includes all - usage for all organizations, regardless of which - organization the usage occurred on. + included in the budget. If omitted, the budget includes all + usage that the billing account pays for. If the folder or + organization contains projects that are paid for by a + different Cloud Billing account, the budget *doesn't* apply + to those projects. credit_types (MutableSequence[str]): Optional. 
If [Filter.credit_types_treatment][google.cloud.billing.budgets.v1.Filter.credit_types_treatment] diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_service.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_service.py index 66a3959abb7f..283e8d986c07 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_service.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/types/budget_service.py @@ -110,6 +110,18 @@ class ListBudgetsRequest(proto.Message): Required. Name of billing account to list budgets under. Values are of the form ``billingAccounts/{billingAccountId}``. + scope (str): + Optional. Set the scope of the budgets to be + returned, in the format of the resource name. + The scope of a budget is the cost that it + tracks, such as costs for a single project, or + the costs for all projects in a folder. Only + project scope (in the format of + "projects/project-id" or "projects/123") is + supported in this field. When this field is set + to a project's resource name, the budgets + returned are tracking the costs for that + project. page_size (int): Optional. The maximum number of budgets to return per page. 
The default and maximum value @@ -125,6 +137,10 @@ class ListBudgetsRequest(proto.Message): proto.STRING, number=1, ) + scope: str = proto.Field( + proto.STRING, + number=4, + ) page_size: int = proto.Field( proto.INT32, number=2, diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py index 1e9aa303fdc5..acb13947b2f3 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py @@ -315,6 +315,7 @@ async def update_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -416,6 +417,7 @@ async def get_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. 
When reading from the API, you will not see these fields in @@ -517,6 +519,7 @@ async def list_budgets( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBudgetsAsyncPager: r"""Returns a list of budgets for a billing account. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -562,6 +565,7 @@ async def sample_list_budgets(): Returns: google.cloud.billing.budgets_v1beta1.services.budget_service.pagers.ListBudgetsAsyncPager: Response for ListBudgets + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py index 15ca6a598c59..094e89ae1843 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py @@ -538,6 +538,7 @@ def update_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -630,6 +631,7 @@ def get_budget( metadata: Sequence[Tuple[str, str]] = (), ) -> budget_model.Budget: r"""Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -722,6 +724,7 @@ def list_budgets( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBudgetsPager: r"""Returns a list of budgets for a billing account. 
+ WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -767,6 +770,7 @@ def sample_list_budgets(): Returns: google.cloud.billing.budgets_v1beta1.services.budget_service.pagers.ListBudgetsPager: Response for ListBudgets + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py index f7733dfac92c..0a4398374c81 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py @@ -267,6 +267,7 @@ def update_budget( r"""Return a callable for the update budget method over gRPC. Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -297,6 +298,7 @@ def get_budget( r"""Return a callable for the get budget method over gRPC. Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -330,6 +332,7 @@ def list_budgets( r"""Return a callable for the list budgets method over gRPC. Returns a list of budgets for a billing account. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. 
When reading from the API, you will not see these fields in diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py index 36e266b8cb4d..9fd9a8f6e6c0 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py @@ -270,6 +270,7 @@ def update_budget( r"""Return a callable for the update budget method over gRPC. Updates a budget and returns the updated budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. Budget fields that are not exposed in this API will not be @@ -300,6 +301,7 @@ def get_budget( r"""Return a callable for the get budget method over gRPC. Returns a budget. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. When reading from the API, you will not see these fields in @@ -334,6 +336,7 @@ def list_budgets( r"""Return a callable for the list budgets method over gRPC. Returns a list of budgets for a billing account. + WARNING: There are some fields exposed on the Google Cloud Console that aren't available on this API. 
When reading from the API, you will not see these fields in diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_model.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_model.py index 80ff14edc322..168b5272497c 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_model.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_model.py @@ -305,6 +305,14 @@ class AllUpdatesRule(proto.Message): Default notifications are sent to those with Billing Account Administrator and Billing Account User IAM roles for the target account. + enable_project_level_recipients (bool): + Optional. When set to true, and when the budget has a single + project configured, notifications will be sent to project + level recipients of that project. This field will be ignored + if the budget has multiple or no project configured. + + Currently, project level recipients are the users with + ``Owner`` role on a cloud project. """ pubsub_topic: str = proto.Field( @@ -323,6 +331,10 @@ class AllUpdatesRule(proto.Message): proto.BOOL, number=4, ) + enable_project_level_recipients: bool = proto.Field( + proto.BOOL, + number=5, + ) class Filter(proto.Message): @@ -348,9 +360,11 @@ class Filter(proto.Message): ``folders/{folderId}`` or ``organizations/{organizationId}``, specifying that usage from only this set of folders and organizations should be - included in the budget. If omitted, the report includes all - usage for all organizations, regardless of which - organization the usage occurred on. + included in the budget. If omitted, the budget includes all + usage that the billing account pays for. If the folder or + organization contains projects that are paid for by a + different Cloud Billing account, the budget *doesn't* apply + to those projects. credit_types (MutableSequence[str]): Optional. 
If [Filter.credit_types_treatment][google.cloud.billing.budgets.v1beta1.Filter.credit_types_treatment] diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_service.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_service.py index d95d2df0552a..1bb28b391f9e 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_service.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/types/budget_service.py @@ -110,6 +110,18 @@ class ListBudgetsRequest(proto.Message): Required. Name of billing account to list budgets under. Values are of the form ``billingAccounts/{billingAccountId}``. + scope (str): + Optional. Set the scope of the budgets to be + returned, in the format of the resource name. + The scope of a budget is the cost that it + tracks, such as costs for a single project, or + the costs for all projects in a folder. Only + project scope (in the format of + "projects/project-id" or "projects/123") is + supported in this field. When this field is set + to a project's resource name, the budgets + returned are tracking the costs for that + project. page_size (int): Optional. The maximum number of budgets to return per page. 
The default and maximum value @@ -125,6 +137,10 @@ class ListBudgetsRequest(proto.Message): proto.STRING, number=1, ) + scope: str = proto.Field( + proto.STRING, + number=4, + ) page_size: int = proto.Field( proto.INT32, number=2, diff --git a/packages/google-cloud-billing-budgets/noxfile.py b/packages/google-cloud-billing-budgets/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-billing-budgets/noxfile.py +++ b/packages/google-cloud-billing-budgets/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json index 9f6a8ee49f17..f9462fe28bfe 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "0.1.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json index 33b9f5b1220a..e3bf697ba876 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json @@ -8,7 +8,7 @@ ], 
"language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "0.1.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/scripts/decrypt-secrets.sh b/packages/google-cloud-billing-budgets/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-billing-budgets/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-billing-budgets/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1_keywords.py b/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1_keywords.py index b3f9e74cb6bd..193bcf772084 100644 --- a/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1_keywords.py +++ b/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1_keywords.py @@ -42,7 +42,7 @@ class budgetsCallTransformer(cst.CSTTransformer): 'create_budget': ('parent', 'budget', ), 'delete_budget': ('name', ), 'get_budget': ('name', ), - 'list_budgets': ('parent', 'page_size', 'page_token', ), + 'list_budgets': ('parent', 'scope', 'page_size', 'page_token', ), 'update_budget': ('budget', 'update_mask', ), } diff --git a/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1beta1_keywords.py b/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1beta1_keywords.py index b3f9e74cb6bd..193bcf772084 100644 --- a/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1beta1_keywords.py +++ b/packages/google-cloud-billing-budgets/scripts/fixup_budgets_v1beta1_keywords.py @@ -42,7 +42,7 @@ class budgetsCallTransformer(cst.CSTTransformer): 'create_budget': ('parent', 'budget', ), 'delete_budget': ('name', ), 'get_budget': ('name', ), - 'list_budgets': 
('parent', 'page_size', 'page_token', ), + 'list_budgets': ('parent', 'scope', 'page_size', 'page_token', ), 'update_budget': ('budget', 'update_mask', ), } diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py index efe244d9d5e7..70bb0b96338d 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py @@ -2131,6 +2131,7 @@ def test_create_budget_rest(request_type): "monitoring_notification_channels_value2", ], "disable_default_iam_recipients": True, + "enable_project_level_recipients": True, }, "etag": "etag_value", } @@ -2359,6 +2360,7 @@ def test_create_budget_rest_bad_request( "monitoring_notification_channels_value2", ], "disable_default_iam_recipients": True, + "enable_project_level_recipients": True, }, "etag": "etag_value", } @@ -2490,6 +2492,7 @@ def test_update_budget_rest(request_type): "monitoring_notification_channels_value2", ], "disable_default_iam_recipients": True, + "enable_project_level_recipients": True, }, "etag": "etag_value", } @@ -2707,6 +2710,7 @@ def test_update_budget_rest_bad_request( "monitoring_notification_channels_value2", ], "disable_default_iam_recipients": True, + "enable_project_level_recipients": True, }, "etag": "etag_value", } @@ -3128,6 +3132,7 @@ def test_list_budgets_rest_required_fields( ( "page_size", "page_token", + "scope", ) ) jsonified_request.update(unset_fields) @@ -3187,6 +3192,7 @@ def test_list_budgets_rest_unset_required_fields(): ( "pageSize", "pageToken", + "scope", ) ) & set(("parent",)) diff --git a/packages/google-cloud-billing/CHANGELOG.md b/packages/google-cloud-billing/CHANGELOG.md index bd6c3a6615c1..50dee8d4c0e9 100644 --- a/packages/google-cloud-billing/CHANGELOG.md +++ b/packages/google-cloud-billing/CHANGELOG.md @@ -1,5 +1,19 @@ 
# Changelog +## [1.11.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.11.3...google-cloud-billing-v1.11.4) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [1.11.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.11.2...google-cloud-billing-v1.11.3) (2023-08-31) + + +### Documentation + +* update comments ([#11598](https://github.com/googleapis/google-cloud-python/issues/11598)) ([aaa652e](https://github.com/googleapis/google-cloud-python/commit/aaa652ec54314e59c4343abef76a956b68fe8069)) + ## [1.11.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.11.1...google-cloud-billing-v1.11.2) (2023-08-03) diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index 787a045996f3..65cc8488e2bd 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.2" # {x-release-please-version} +__version__ = "1.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index 787a045996f3..65cc8488e2bd 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.2" # {x-release-please-version} +__version__ = "1.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py index ddd066b8cacf..31b71701c7ea 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/async_client.py @@ -1017,7 +1017,7 @@ async def sample_update_project_billing_info(): on the ``request`` instance; if ``request`` is provided, this should not be set. project_billing_info (:class:`google.cloud.billing_v1.types.ProjectBillingInfo`): - The new billing information for the project. Read-only + The new billing information for the project. Output-only fields are ignored; thus, you can leave empty all fields except ``billing_account_name``. @@ -1140,8 +1140,8 @@ async def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1288,8 +1288,8 @@ async def sample_set_iam_policy(): The request object. Request message for ``SetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being specified. - See the operation documentation for the + policy is being specified. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1438,8 +1438,8 @@ async def sample_test_iam_permissions(): The request object. Request message for ``TestIamPermissions`` method. 
resource (:class:`str`): REQUIRED: The resource for which the - policy detail is being requested. - See the operation documentation for the + policy detail is being requested. See + the operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py index 42526e3cd4f4..c7ae757b3117 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py @@ -1190,7 +1190,7 @@ def sample_update_project_billing_info(): on the ``request`` instance; if ``request`` is provided, this should not be set. project_billing_info (google.cloud.billing_v1.types.ProjectBillingInfo): - The new billing information for the project. Read-only + The new billing information for the project. Output-only fields are ignored; thus, you can leave empty all fields except ``billing_account_name``. @@ -1305,8 +1305,8 @@ def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1440,8 +1440,8 @@ def sample_set_iam_policy(): The request object. Request message for ``SetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being specified. - See the operation documentation for the + policy is being specified. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1577,8 +1577,8 @@ def sample_test_iam_permissions(): The request object. 
Request message for ``TestIamPermissions`` method. resource (str): REQUIRED: The resource for which the - policy detail is being requested. - See the operation documentation for the + policy detail is being requested. See + the operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py b/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py index 397b78958d1c..99d4faec681b 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/types/cloud_billing.py @@ -100,14 +100,13 @@ class ProjectBillingInfo(proto.Message): ``projects/{project_id}/billingInfo``. For example, the resource name for the billing information for project ``tokyo-rain-123`` would be - ``projects/tokyo-rain-123/billingInfo``. This field is - read-only. + ``projects/tokyo-rain-123/billingInfo``. project_id (str): Output only. The ID of the project that this ``ProjectBillingInfo`` represents, such as ``tokyo-rain-123``. This is a convenience field so that you don't need to parse the ``name`` field to obtain a project - ID. This field is read-only. + ID. billing_account_name (str): The resource name of the billing account associated with the project, if any. For example, @@ -118,8 +117,7 @@ class ProjectBillingInfo(proto.Message): which usage on the project is charged. False if the project is associated with a closed billing account, or no billing account at all, and - therefore cannot use paid services. This field - is read-only. + therefore cannot use paid services. """ name: str = proto.Field( @@ -356,7 +354,7 @@ class UpdateProjectBillingInfoRequest(proto.Message): the billing information that you want to update. For example, ``projects/tokyo-rain-123``. 
project_billing_info (google.cloud.billing_v1.types.ProjectBillingInfo): - The new billing information for the project. Read-only + The new billing information for the project. Output-only fields are ignored; thus, you can leave empty all fields except ``billing_account_name``. """ diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index aba3e70f229c..7b101ec923cd 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "1.11.2" + "version": "1.11.4" }, "snippets": [ { diff --git a/packages/google-cloud-certificate-manager/.flake8 b/packages/google-cloud-certificate-manager/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-certificate-manager/.flake8 +++ b/packages/google-cloud-certificate-manager/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-certificate-manager/CHANGELOG.md b/packages/google-cloud-certificate-manager/CHANGELOG.md index c02423b6cfaf..3108f048d0d6 100644 --- a/packages/google-cloud-certificate-manager/CHANGELOG.md +++ b/packages/google-cloud-certificate-manager/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-certificate-manager-v1.4.3...google-cloud-certificate-manager-v1.4.4) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [1.4.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-certificate-manager-v1.4.2...google-cloud-certificate-manager-v1.4.3) (2023-07-05) diff --git a/packages/google-cloud-certificate-manager/CONTRIBUTING.rst b/packages/google-cloud-certificate-manager/CONTRIBUTING.rst index 1e14e1aaebb1..8f313a624dbb 100644 --- a/packages/google-cloud-certificate-manager/CONTRIBUTING.rst +++ b/packages/google-cloud-certificate-manager/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-certificate-manager/noxfile.py ********** diff --git a/packages/google-cloud-certificate-manager/MANIFEST.in b/packages/google-cloud-certificate-manager/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-certificate-manager/MANIFEST.in +++ b/packages/google-cloud-certificate-manager/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-certificate-manager/README.rst b/packages/google-cloud-certificate-manager/README.rst index e5a3fc50fae1..3fd0171edbf7 100644 --- a/packages/google-cloud-certificate-manager/README.rst +++ b/packages/google-cloud-certificate-manager/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-certificate-manager + pip install google-cloud-certificate-manager Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-certificate-manager + py -m venv + .\\Scripts\activate + pip install google-cloud-certificate-manager Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-certificate-manager/docs/conf.py b/packages/google-cloud-certificate-manager/docs/conf.py index ed6b2dc4e67f..cc2cf925f903 100644 --- a/packages/google-cloud-certificate-manager/docs/conf.py +++ b/packages/google-cloud-certificate-manager/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py index 360a0d13ebdd..797818043c6a 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.4.4" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py index 360a0d13ebdd..797818043c6a 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.4.4" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py index f3842d109b70..d03e9d800aa8 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/async_client.py @@ -45,7 +45,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py index 207c716eeaf6..1016f742a4f6 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py @@ -49,7 +49,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import 
duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py index d6ad6e203cd2..40229e76efaa 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py @@ -33,7 +33,6 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-certificate-manager/noxfile.py b/packages/google-cloud-certificate-manager/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-certificate-manager/noxfile.py +++ b/packages/google-cloud-certificate-manager/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json index 8721c43cdb0c..5f31a4054354 100644 --- a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json +++ b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-certificate-manager", - "version": "0.1.0" + "version": "1.4.4" }, "snippets": [ { diff --git a/packages/google-cloud-certificate-manager/scripts/decrypt-secrets.sh b/packages/google-cloud-certificate-manager/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-certificate-manager/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-certificate-manager/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py index cfcbf951fe48..a0d6c3b3bb54 100644 --- a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py +++ b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py @@ -42,7 +42,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-channel/.flake8 b/packages/google-cloud-channel/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-channel/.flake8 +++ b/packages/google-cloud-channel/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-channel/CHANGELOG.md b/packages/google-cloud-channel/CHANGELOG.md index b1b833b1b2c3..bdc7662ec089 100644 --- a/packages/google-cloud-channel/CHANGELOG.md +++ b/packages/google-cloud-channel/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.15.1...google-cloud-channel-v1.16.0) (2023-09-19) + + +### Features + +* Launch QueryEligibleBillingAccounts API ([8de7cc7](https://github.com/googleapis/google-cloud-python/commit/8de7cc7a4ad8f2968cf432b978f5f5234f427937)) +* mark ChannelPartnerGranularity as deprecated and offer alternatives ([8de7cc7](https://github.com/googleapis/google-cloud-python/commit/8de7cc7a4ad8f2968cf432b978f5f5234f427937)) + + +### Documentation + +* Add clarification for the additional_apks field of TestSetup ([8de7cc7](https://github.com/googleapis/google-cloud-python/commit/8de7cc7a4ad8f2968cf432b978f5f5234f427937)) + ## [1.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.15.0...google-cloud-channel-v1.15.1) (2023-07-05) diff --git a/packages/google-cloud-channel/CONTRIBUTING.rst b/packages/google-cloud-channel/CONTRIBUTING.rst index a456a5500661..a380d6a90991 100644 --- a/packages/google-cloud-channel/CONTRIBUTING.rst +++ b/packages/google-cloud-channel/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-channel/noxfile.py ********** diff --git a/packages/google-cloud-channel/MANIFEST.in b/packages/google-cloud-channel/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-channel/MANIFEST.in +++ b/packages/google-cloud-channel/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-channel/README.rst b/packages/google-cloud-channel/README.rst index 21842a592b15..d9f46b090efc 100644 --- a/packages/google-cloud-channel/README.rst +++ b/packages/google-cloud-channel/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. 
_samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-channel + pip install google-cloud-channel Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-channel + py -m venv + .\\Scripts\activate + pip install google-cloud-channel Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-channel/docs/conf.py b/packages/google-cloud-channel/docs/conf.py index 304c8680966e..099a91e3362c 100644 --- a/packages/google-cloud-channel/docs/conf.py +++ b/packages/google-cloud-channel/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-channel/google/cloud/channel/__init__.py b/packages/google-cloud-channel/google/cloud/channel/__init__.py index 61a6eb542f9d..1101aaed6b0d 100644 --- a/packages/google-cloud-channel/google/cloud/channel/__init__.py +++ b/packages/google-cloud-channel/google/cloud/channel/__init__.py @@ -30,6 +30,7 @@ from google.cloud.channel_v1.services.cloud_channel_service.client import ( CloudChannelServiceClient, ) +from google.cloud.channel_v1.types.billing_accounts import BillingAccount from google.cloud.channel_v1.types.channel_partner_links import ( ChannelPartnerLink, ChannelPartnerLinkState, @@ -109,6 +110,7 @@ from google.cloud.channel_v1.types.service import ( ActivateEntitlementRequest, BillableSku, + BillingAccountPurchaseInfo, CancelEntitlementRequest, ChangeOfferRequest, ChangeParametersRequest, @@ -166,9 +168,12 @@ ProvisionCloudIdentityRequest, PurchasableOffer, PurchasableSku, + QueryEligibleBillingAccountsRequest, + QueryEligibleBillingAccountsResponse, RegisterSubscriberRequest, RegisterSubscriberResponse, SkuGroup, + SkuPurchaseGroup, StartPaidServiceRequest, SuspendEntitlementRequest, TransferableOffer, @@ -193,6 +198,7 @@ "CloudChannelReportsServiceAsyncClient", "CloudChannelServiceClient", "CloudChannelServiceAsyncClient", + "BillingAccount", "ChannelPartnerLink", "ChannelPartnerLinkState", "ChannelPartnerLinkView", @@ -258,6 +264,7 @@ "RebillingBasis", "ActivateEntitlementRequest", "BillableSku", + "BillingAccountPurchaseInfo", "CancelEntitlementRequest", "ChangeOfferRequest", "ChangeParametersRequest", @@ -315,9 +322,12 @@ "ProvisionCloudIdentityRequest", "PurchasableOffer", "PurchasableSku", + "QueryEligibleBillingAccountsRequest", + "QueryEligibleBillingAccountsResponse", "RegisterSubscriberRequest", "RegisterSubscriberResponse", "SkuGroup", + "SkuPurchaseGroup", "StartPaidServiceRequest", "SuspendEntitlementRequest", "TransferableOffer", diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py 
b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index 360a0d13ebdd..725b83961de2 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py b/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py index 2be242d33041..14862e736408 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py @@ -26,6 +26,7 @@ CloudChannelServiceAsyncClient, CloudChannelServiceClient, ) +from .types.billing_accounts import BillingAccount from .types.channel_partner_links import ( ChannelPartnerLink, ChannelPartnerLinkState, @@ -94,6 +95,7 @@ from .types.service import ( ActivateEntitlementRequest, BillableSku, + BillingAccountPurchaseInfo, CancelEntitlementRequest, ChangeOfferRequest, ChangeParametersRequest, @@ -151,9 +153,12 @@ ProvisionCloudIdentityRequest, PurchasableOffer, PurchasableSku, + QueryEligibleBillingAccountsRequest, + QueryEligibleBillingAccountsResponse, RegisterSubscriberRequest, RegisterSubscriberResponse, SkuGroup, + SkuPurchaseGroup, StartPaidServiceRequest, SuspendEntitlementRequest, TransferableOffer, @@ -176,6 +181,8 @@ "AdminUser", "AssociationInfo", "BillableSku", + "BillingAccount", + "BillingAccountPurchaseInfo", "CancelEntitlementRequest", "ChangeOfferRequest", "ChangeParametersRequest", @@ -278,6 +285,8 @@ "ProvisionedService", "PurchasableOffer", "PurchasableSku", + "QueryEligibleBillingAccountsRequest", + "QueryEligibleBillingAccountsResponse", "RebillingBasis", "RegisterSubscriberRequest", "RegisterSubscriberResponse", @@ -297,6 +306,7 
@@ "Sku", "SkuGroup", "SkuGroupCondition", + "SkuPurchaseGroup", "StartPaidServiceRequest", "SubscriberEvent", "SuspendEntitlementRequest", diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_metadata.json b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_metadata.json index 447cd3dbbe62..065cd6e0652d 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_metadata.json +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_metadata.json @@ -244,6 +244,11 @@ "provision_cloud_identity" ] }, + "QueryEligibleBillingAccounts": { + "methods": [ + "query_eligible_billing_accounts" + ] + }, "RegisterSubscriber": { "methods": [ "register_subscriber" @@ -489,6 +494,11 @@ "provision_cloud_identity" ] }, + "QueryEligibleBillingAccounts": { + "methods": [ + "query_eligible_billing_accounts" + ] + }, "RegisterSubscriber": { "methods": [ "register_subscriber" diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index 360a0d13ebdd..725b83961de2 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py index 5d2a2ac14845..6ace2281b9f2 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/async_client.py @@ -44,7 +44,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.channel_v1.services.cloud_channel_reports_service import pagers from google.cloud.channel_v1.types import operations, reports_service diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py index 25a890911fbe..8ca64c07efa0 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py @@ -48,7 +48,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.channel_v1.services.cloud_channel_reports_service import pagers from google.cloud.channel_v1.types import operations, reports_service diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py 
b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py index 3610394de4eb..31299bab18e0 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py @@ -44,7 +44,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import postal_address_pb2 # type: ignore @@ -99,6 +99,10 @@ class CloudChannelServiceAsyncClient: DEFAULT_ENDPOINT = CloudChannelServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = CloudChannelServiceClient.DEFAULT_MTLS_ENDPOINT + billing_account_path = staticmethod(CloudChannelServiceClient.billing_account_path) + parse_billing_account_path = staticmethod( + CloudChannelServiceClient.parse_billing_account_path + ) channel_partner_link_path = staticmethod( CloudChannelServiceClient.channel_partner_link_path ) @@ -621,8 +625,13 @@ async def create_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. + - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to create a customer. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. @@ -922,8 +931,13 @@ async def import_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. 
+ - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to import the customer. See + https://support.google.com/channelservices/answer/9759265 + - NOT_FOUND: Cloud Identity doesn't exist or was deleted. - INVALID_ARGUMENT: Required parameters are missing, or the auth_token is expired or invalid. @@ -1017,8 +1031,12 @@ async def provision_cloud_identity( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - You are not authorized to provision cloud identity id. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer was not found. @@ -1368,6 +1386,9 @@ async def list_transferable_offers( generating auth token. - The reseller account making the request is different from the reseller account in the query. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -1565,8 +1586,13 @@ async def create_entitlement( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. @@ -2563,8 +2589,13 @@ async def transfer_entitlements( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. 
See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer or offer resource was not found. @@ -3549,12 +3580,12 @@ async def create_customer_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any - [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement] - or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. - The contained [CustomerRepricingConfig.repricing_config][google.cloud.channel.v1.CustomerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement]. @@ -4238,10 +4269,12 @@ async def create_channel_partner_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any ChannelPartner or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. - The contained [ChannelPartnerRepricingConfig.repricing_config][google.cloud.channel.v1.ChannelPartnerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a ChannelPartner. 
Possible Error Codes: @@ -4710,6 +4743,7 @@ async def sample_list_sku_groups(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListSkuGroupsAsyncPager: Response message for ListSkuGroups. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5048,6 +5082,7 @@ async def sample_list_products(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListProductsAsyncPager: Response message for ListProducts. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5139,6 +5174,7 @@ async def sample_list_skus(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListSkusAsyncPager: Response message for ListSkus. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5235,6 +5271,7 @@ async def sample_list_offers(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListOffersAsyncPager: Response message for ListOffers. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5399,8 +5436,13 @@ async def list_purchasable_offers( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -5491,6 +5533,102 @@ async def sample_list_purchasable_offers(): # Done; return the response. 
return response + async def query_eligible_billing_accounts( + self, + request: Optional[ + Union[service.QueryEligibleBillingAccountsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.QueryEligibleBillingAccountsResponse: + r"""Lists the billing accounts that are eligible to purchase + particular SKUs for a given customer. + + Possible error codes: + + - PERMISSION_DENIED: The customer doesn't belong to the + reseller. + - INVALID_ARGUMENT: Required request parameters are missing or + invalid. + + Return value: Based on the provided list of SKUs, returns a list + of SKU groups that must be purchased using the same billing + account and the billing accounts eligible to purchase each SKU + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import channel_v1 + + async def sample_query_eligible_billing_accounts(): + # Create a client + client = channel_v1.CloudChannelServiceAsyncClient() + + # Initialize request argument(s) + request = channel_v1.QueryEligibleBillingAccountsRequest( + customer="customer_value", + skus=['skus_value1', 'skus_value2'], + ) + + # Make the request + response = await client.query_eligible_billing_accounts(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.channel_v1.types.QueryEligibleBillingAccountsRequest, dict]]): + The request object. Request message for + QueryEligibleBillingAccounts. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.channel_v1.types.QueryEligibleBillingAccountsResponse: + Response message for + QueryEligibleBillingAccounts. + + """ + # Create or coerce a protobuf request object. + request = service.QueryEligibleBillingAccountsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_eligible_billing_accounts, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("customer", request.customer),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def register_subscriber( self, request: Optional[Union[service.RegisterSubscriberRequest, dict]] = None, @@ -5757,6 +5895,7 @@ async def sample_list_subscribers(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListSubscribersAsyncPager: Response Message for ListSubscribers. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py index eca7de8a88d7..b7b6e1756644 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py @@ -48,7 +48,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import postal_address_pb2 # type: ignore @@ -216,6 +216,26 @@ def transport(self) -> CloudChannelServiceTransport: """ return self._transport + @staticmethod + def billing_account_path( + account: str, + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "accounts/{account}/billingAccounts/{billing_account}".format( + account=account, + billing_account=billing_account, + ) + + @staticmethod + def parse_billing_account_path(path: str) -> Dict[str, str]: + """Parses a billing_account path into its component segments.""" + m = re.match( + r"^accounts/(?P.+?)/billingAccounts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def channel_partner_link_path( account: str, @@ -970,8 +990,13 @@ def create_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. + - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to create a customer. 
See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. @@ -1273,8 +1298,13 @@ def import_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. + - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to import the customer. See + https://support.google.com/channelservices/answer/9759265 + - NOT_FOUND: Cloud Identity doesn't exist or was deleted. - INVALID_ARGUMENT: Required parameters are missing, or the auth_token is expired or invalid. @@ -1369,8 +1399,12 @@ def provision_cloud_identity( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - You are not authorized to provision cloud identity id. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer was not found. @@ -1723,6 +1757,9 @@ def list_transferable_offers( generating auth token. - The reseller account making the request is different from the reseller account in the query. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -1922,8 +1959,13 @@ def create_entitlement( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. 
@@ -2928,8 +2970,13 @@ def transfer_entitlements( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer or offer resource was not found. @@ -3932,12 +3979,12 @@ def create_customer_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any - [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement] - or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. - The contained [CustomerRepricingConfig.repricing_config][google.cloud.channel.v1.CustomerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement]. @@ -4631,10 +4678,12 @@ def create_channel_partner_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any ChannelPartner or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. 
- The contained [ChannelPartnerRepricingConfig.repricing_config][google.cloud.channel.v1.ChannelPartnerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a ChannelPartner. Possible Error Codes: @@ -5113,6 +5162,7 @@ def sample_list_sku_groups(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListSkuGroupsPager: Response message for ListSkuGroups. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5454,6 +5504,7 @@ def sample_list_products(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListProductsPager: Response message for ListProducts. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5546,6 +5597,7 @@ def sample_list_skus(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListSkusPager: Response message for ListSkus. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5643,6 +5695,7 @@ def sample_list_offers(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListOffersPager: Response message for ListOffers. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5809,8 +5862,13 @@ def list_purchasable_offers( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -5902,6 +5960,105 @@ def sample_list_purchasable_offers(): # Done; return the response. 
return response + def query_eligible_billing_accounts( + self, + request: Optional[ + Union[service.QueryEligibleBillingAccountsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.QueryEligibleBillingAccountsResponse: + r"""Lists the billing accounts that are eligible to purchase + particular SKUs for a given customer. + + Possible error codes: + + - PERMISSION_DENIED: The customer doesn't belong to the + reseller. + - INVALID_ARGUMENT: Required request parameters are missing or + invalid. + + Return value: Based on the provided list of SKUs, returns a list + of SKU groups that must be purchased using the same billing + account and the billing accounts eligible to purchase each SKU + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import channel_v1 + + def sample_query_eligible_billing_accounts(): + # Create a client + client = channel_v1.CloudChannelServiceClient() + + # Initialize request argument(s) + request = channel_v1.QueryEligibleBillingAccountsRequest( + customer="customer_value", + skus=['skus_value1', 'skus_value2'], + ) + + # Make the request + response = client.query_eligible_billing_accounts(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.channel_v1.types.QueryEligibleBillingAccountsRequest, dict]): + The request object. Request message for + QueryEligibleBillingAccounts. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.channel_v1.types.QueryEligibleBillingAccountsResponse: + Response message for + QueryEligibleBillingAccounts. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a service.QueryEligibleBillingAccountsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.QueryEligibleBillingAccountsRequest): + request = service.QueryEligibleBillingAccountsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.query_eligible_billing_accounts + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("customer", request.customer),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def register_subscriber( self, request: Optional[Union[service.RegisterSubscriberRequest, dict]] = None, @@ -6170,6 +6327,7 @@ def sample_list_subscribers(): Returns: google.cloud.channel_v1.services.cloud_channel_service.pagers.ListSubscribersPager: Response Message for ListSubscribers. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/base.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/base.py index 48de422d7afd..87fc9b273937 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/base.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/base.py @@ -351,6 +351,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.query_eligible_billing_accounts: gapic_v1.method.wrap_method( + self.query_eligible_billing_accounts, + default_timeout=None, + client_info=client_info, + ), self.register_subscriber: gapic_v1.method.wrap_method( self.register_subscriber, default_timeout=None, @@ -839,6 +844,18 @@ def list_purchasable_offers( ]: raise NotImplementedError() + @property + def query_eligible_billing_accounts( + self, + ) -> Callable[ + [service.QueryEligibleBillingAccountsRequest], + Union[ + service.QueryEligibleBillingAccountsResponse, + Awaitable[service.QueryEligibleBillingAccountsResponse], + ], + ]: + raise NotImplementedError() + @property def register_subscriber( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py index d18b10e2d601..af23ec6b0894 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py @@ -412,8 +412,13 @@ def create_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. 
+ - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to create a customer. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. @@ -530,8 +535,13 @@ def import_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. + - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to import the customer. See + https://support.google.com/channelservices/answer/9759265 + - NOT_FOUND: Cloud Identity doesn't exist or was deleted. - INVALID_ARGUMENT: Required parameters are missing, or the auth_token is expired or invalid. @@ -569,8 +579,12 @@ def provision_cloud_identity( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - You are not authorized to provision cloud identity id. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer was not found. @@ -720,6 +734,9 @@ def list_transferable_offers( generating auth token. - The reseller account making the request is different from the reseller account in the query. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -794,8 +811,13 @@ def create_entitlement( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. 
+ - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. @@ -1221,8 +1243,13 @@ def transfer_entitlements( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer or offer resource was not found. @@ -1662,12 +1689,12 @@ def create_customer_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any - [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement] - or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. - The contained [CustomerRepricingConfig.repricing_config][google.cloud.channel.v1.CustomerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement]. @@ -1952,10 +1979,12 @@ def create_channel_partner_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. 
- There is a limit of ten configs for any ChannelPartner or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. - The contained [ChannelPartnerRepricingConfig.repricing_config][google.cloud.channel.v1.ChannelPartnerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a ChannelPartner. Possible Error Codes: @@ -2381,8 +2410,13 @@ def list_purchasable_offers( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -2404,6 +2438,51 @@ def list_purchasable_offers( ) return self._stubs["list_purchasable_offers"] + @property + def query_eligible_billing_accounts( + self, + ) -> Callable[ + [service.QueryEligibleBillingAccountsRequest], + service.QueryEligibleBillingAccountsResponse, + ]: + r"""Return a callable for the query eligible billing + accounts method over gRPC. + + Lists the billing accounts that are eligible to purchase + particular SKUs for a given customer. + + Possible error codes: + + - PERMISSION_DENIED: The customer doesn't belong to the + reseller. + - INVALID_ARGUMENT: Required request parameters are missing or + invalid. + + Return value: Based on the provided list of SKUs, returns a list + of SKU groups that must be purchased using the same billing + account and the billing accounts eligible to purchase each SKU + group. 
+ + Returns: + Callable[[~.QueryEligibleBillingAccountsRequest], + ~.QueryEligibleBillingAccountsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_eligible_billing_accounts" not in self._stubs: + self._stubs[ + "query_eligible_billing_accounts" + ] = self.grpc_channel.unary_unary( + "/google.cloud.channel.v1.CloudChannelService/QueryEligibleBillingAccounts", + request_serializer=service.QueryEligibleBillingAccountsRequest.serialize, + response_deserializer=service.QueryEligibleBillingAccountsResponse.deserialize, + ) + return self._stubs["query_eligible_billing_accounts"] + @property def register_subscriber( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py index 81e4f786a99b..ad57afee6711 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py @@ -419,8 +419,13 @@ def create_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. + - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to create a customer. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. 
@@ -537,8 +542,13 @@ def import_customer( Possible error codes: - - PERMISSION_DENIED: The reseller account making the request is - different from the reseller account in the API request. + - PERMISSION_DENIED: + + - The reseller account making the request is different from + the reseller account in the API request. + - You are not authorized to import the customer. See + https://support.google.com/channelservices/answer/9759265 + - NOT_FOUND: Cloud Identity doesn't exist or was deleted. - INVALID_ARGUMENT: Required parameters are missing, or the auth_token is expired or invalid. @@ -578,8 +588,12 @@ def provision_cloud_identity( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - You are not authorized to provision cloud identity id. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer was not found. @@ -733,6 +747,9 @@ def list_transferable_offers( generating auth token. - The reseller account making the request is different from the reseller account in the query. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -809,8 +826,13 @@ def create_entitlement( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: - Required request parameters are missing or invalid. @@ -1250,8 +1272,13 @@ def transfer_entitlements( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller. 
+ - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller. + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. - NOT_FOUND: The customer or offer resource was not found. @@ -1694,12 +1721,12 @@ def create_customer_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any - [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement] - or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. - The contained [CustomerRepricingConfig.repricing_config][google.cloud.channel.v1.CustomerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement]. @@ -1986,10 +2013,12 @@ def create_channel_partner_repricing_config( configs. Changes to the config may be immediate, but may take up to 24 hours. - There is a limit of ten configs for any ChannelPartner or + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement], + for any [RepricingConfig.effective_invoice_month][google.cloud.channel.v1.RepricingConfig.effective_invoice_month]. 
- The contained [ChannelPartnerRepricingConfig.repricing_config][google.cloud.channel.v1.ChannelPartnerRepricingConfig.repricing_config] - vaule must be different from the value used in the current + value must be different from the value used in the current config for a ChannelPartner. Possible Error Codes: @@ -2423,8 +2452,13 @@ def list_purchasable_offers( Possible error codes: - - PERMISSION_DENIED: The customer doesn't belong to the - reseller + - PERMISSION_DENIED: + + - The customer doesn't belong to the reseller + - The reseller is not authorized to transact on this + Product. See + https://support.google.com/channelservices/answer/9759265 + - INVALID_ARGUMENT: Required request parameters are missing or invalid. @@ -2446,6 +2480,51 @@ def list_purchasable_offers( ) return self._stubs["list_purchasable_offers"] + @property + def query_eligible_billing_accounts( + self, + ) -> Callable[ + [service.QueryEligibleBillingAccountsRequest], + Awaitable[service.QueryEligibleBillingAccountsResponse], + ]: + r"""Return a callable for the query eligible billing + accounts method over gRPC. + + Lists the billing accounts that are eligible to purchase + particular SKUs for a given customer. + + Possible error codes: + + - PERMISSION_DENIED: The customer doesn't belong to the + reseller. + - INVALID_ARGUMENT: Required request parameters are missing or + invalid. + + Return value: Based on the provided list of SKUs, returns a list + of SKU groups that must be purchased using the same billing + account and the billing accounts eligible to purchase each SKU + group. + + Returns: + Callable[[~.QueryEligibleBillingAccountsRequest], + Awaitable[~.QueryEligibleBillingAccountsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "query_eligible_billing_accounts" not in self._stubs: + self._stubs[ + "query_eligible_billing_accounts" + ] = self.grpc_channel.unary_unary( + "/google.cloud.channel.v1.CloudChannelService/QueryEligibleBillingAccounts", + request_serializer=service.QueryEligibleBillingAccountsRequest.serialize, + response_deserializer=service.QueryEligibleBillingAccountsResponse.deserialize, + ) + return self._stubs["query_eligible_billing_accounts"] + @property def register_subscriber( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py index d52c45298639..08c54de216cd 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .billing_accounts import BillingAccount from .channel_partner_links import ( ChannelPartnerLink, ChannelPartnerLinkState, @@ -81,6 +82,7 @@ from .service import ( ActivateEntitlementRequest, BillableSku, + BillingAccountPurchaseInfo, CancelEntitlementRequest, ChangeOfferRequest, ChangeParametersRequest, @@ -138,9 +140,12 @@ ProvisionCloudIdentityRequest, PurchasableOffer, PurchasableSku, + QueryEligibleBillingAccountsRequest, + QueryEligibleBillingAccountsResponse, RegisterSubscriberRequest, RegisterSubscriberResponse, SkuGroup, + SkuPurchaseGroup, StartPaidServiceRequest, SuspendEntitlementRequest, TransferableOffer, @@ -157,6 +162,7 @@ from .subscriber_event import CustomerEvent, EntitlementEvent, SubscriberEvent __all__ = ( + "BillingAccount", "ChannelPartnerLink", "ChannelPartnerLinkState", "ChannelPartnerLinkView", @@ -222,6 +228,7 @@ "RebillingBasis", "ActivateEntitlementRequest", "BillableSku", + "BillingAccountPurchaseInfo", "CancelEntitlementRequest", "ChangeOfferRequest", "ChangeParametersRequest", @@ 
-279,9 +286,12 @@ "ProvisionCloudIdentityRequest", "PurchasableOffer", "PurchasableSku", + "QueryEligibleBillingAccountsRequest", + "QueryEligibleBillingAccountsResponse", "RegisterSubscriberRequest", "RegisterSubscriberResponse", "SkuGroup", + "SkuPurchaseGroup", "StartPaidServiceRequest", "SuspendEntitlementRequest", "TransferableOffer", diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/billing_accounts.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/billing_accounts.py new file mode 100644 index 000000000000..221038844e02 --- /dev/null +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/billing_accounts.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.channel.v1", + manifest={ + "BillingAccount", + }, +) + + +class BillingAccount(proto.Message): + r"""Represents a billing account. + + Attributes: + name (str): + Output only. Resource name of the billing account. Format: + accounts/{account_id}/billingAccounts/{billing_account_id}. + display_name (str): + Display name of the billing account. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time when this billing + account was created. + currency_code (str): + Output only. The 3-letter currency code + defined in ISO 4217. + region_code (str): + Output only. The CLDR region code. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + currency_code: str = proto.Field( + proto.STRING, + number=4, + ) + region_code: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py index 1a1a5bb1977d..0506d82f1998 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py @@ -45,10 +45,10 @@ class Customer(proto.Message): org_postal_address (google.type.postal_address_pb2.PostalAddress): Required. The organization address for the customer. To enforce US laws and embargoes, we - require a region and zip code. You must provide - valid addresses for every customer. To set the - customer's language, use the Customer-level - language code. + require a region, postal code, and address + lines. You must provide valid addresses for + every customer. To set the customer's language, + use the Customer-level language code. primary_contact_info (google.cloud.channel_v1.types.ContactInfo): Primary contact info. 
alternate_email (str): diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py index 869776787fa6..5aa62a555dc7 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py @@ -275,7 +275,7 @@ class ProvisionedService(proto.Message): entitlement. For Google Workspace, this is the underlying Subscription ID. For Google Cloud, this is the Billing Account ID of the billing - subaccount.". + subaccount. product_id (str): Output only. The product pertaining to the provisioning resource as specified in the Offer. @@ -452,11 +452,16 @@ class Reason(proto.Enum): Reseller not eligible to sell the SKU. SKU_SUSPENDED (3): SKU subscription is suspended + CHANNEL_PARTNER_NOT_AUTHORIZED_FOR_SKU (4): + The reseller is not authorized to transact on + this Product. See + https://support.google.com/channelservices/answer/9759265 """ REASON_UNSPECIFIED = 0 PENDING_TOS_ACCEPTANCE = 1 SKU_NOT_ELIGIBLE = 2 SKU_SUSPENDED = 3 + CHANNEL_PARTNER_NOT_AUTHORIZED_FOR_SKU = 4 is_eligible: bool = proto.Field( proto.BOOL, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py index 5975fa678b8c..6ebcd6189a66 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py @@ -294,11 +294,14 @@ class ParameterType(proto.Enum): String type. DOUBLE (3): Double type. + BOOLEAN (4): + Boolean type. 
""" PARAMETER_TYPE_UNSPECIFIED = 0 INT64 = 1 STRING = 2 DOUBLE = 3 + BOOLEAN = 4 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/reports_service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/reports_service.py index 2c043e393701..09c1479e2f69 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/reports_service.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/reports_service.py @@ -437,11 +437,13 @@ class DateRange(proto.Message): ``usage_start_date_time`` in Pacific time. invoice_start_date (google.type.date_pb2.Date): The earliest invoice date (inclusive). + If this value is not the first day of a month, this will move it back to the first day of the given month. invoice_end_date (google.type.date_pb2.Date): The latest invoice date (inclusive). + If this value is not the last day of a month, this will move it forward to the last day of the given month. diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py index 593058dd285b..71e1e6c2f5b6 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py @@ -144,16 +144,31 @@ class RepricingConfig(proto.Message): Attributes: entitlement_granularity (google.cloud.channel_v1.types.RepricingConfig.EntitlementGranularity): - Applies the repricing configuration at the - entitlement level. This is the only supported - value for CustomerRepricingConfig. + Applies the repricing configuration at the entitlement + level. 
+ + Note: If a + [ChannelPartnerRepricingConfig][google.cloud.channel.v1.ChannelPartnerRepricingConfig] + using + [RepricingConfig.EntitlementGranularity][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity] + becomes effective, then no existing or future + [RepricingConfig.ChannelPartnerGranularity][google.cloud.channel.v1.RepricingConfig.ChannelPartnerGranularity] + will apply to the + [RepricingConfig.EntitlementGranularity.entitlement][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity.entitlement]. + This is the recommended value for both + [CustomerRepricingConfig][google.cloud.channel.v1.CustomerRepricingConfig] + and + [ChannelPartnerRepricingConfig][google.cloud.channel.v1.ChannelPartnerRepricingConfig]. This field is a member of `oneof`_ ``granularity``. channel_partner_granularity (google.cloud.channel_v1.types.RepricingConfig.ChannelPartnerGranularity): - Applies the repricing configuration at the - channel partner level. This is the only - supported value for - ChannelPartnerRepricingConfig. + Applies the repricing configuration at the channel partner + level. Only + [ChannelPartnerRepricingConfig][google.cloud.channel.v1.ChannelPartnerRepricingConfig] + supports this value. Deprecated: This is no longer + supported. Use + [RepricingConfig.entitlement_granularity][google.cloud.channel.v1.RepricingConfig.entitlement_granularity] + instead. This field is a member of `oneof`_ ``granularity``. effective_invoice_month (google.type.date_pb2.Date): @@ -191,9 +206,11 @@ class EntitlementGranularity(proto.Message): ) class ChannelPartnerGranularity(proto.Message): - r"""Applies the repricing configuration at the channel partner - level. The channel partner value is derived from the resource - name. Takes an empty json object. + r"""Applies the repricing configuration at the channel partner level. + The channel partner value is derived from the resource name. Takes + an empty json object. Deprecated: This is no longer supported. 
Use + [RepricingConfig.EntitlementGranularity][google.cloud.channel.v1.RepricingConfig.EntitlementGranularity] + instead. """ @@ -261,6 +278,7 @@ class PercentageAdjustment(proto.Message): percentage (google.type.decimal_pb2.Decimal): The percentage of the bill to adjust. For example: + Mark down by 1% => "-1.00" Mark up by 1% => "1.00" Pass-Through => "0.00". diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py index 11c319e89093..57b838197403 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py @@ -24,6 +24,7 @@ channel_partner_links as gcc_channel_partner_links, ) from google.cloud.channel_v1.types import entitlement_changes as gcc_entitlement_changes +from google.cloud.channel_v1.types import billing_accounts from google.cloud.channel_v1.types import common from google.cloud.channel_v1.types import customers as gcc_customers from google.cloud.channel_v1.types import entitlements as gcc_entitlements @@ -100,6 +101,10 @@ "ListPurchasableOffersRequest", "ListPurchasableOffersResponse", "PurchasableOffer", + "QueryEligibleBillingAccountsRequest", + "QueryEligibleBillingAccountsResponse", + "SkuPurchaseGroup", + "BillingAccountPurchaseInfo", "RegisterSubscriberRequest", "RegisterSubscriberResponse", "UnregisterSubscriberRequest", @@ -694,6 +699,12 @@ class ListTransferableOffersRequest(proto.Message): example, "en-US". The response will localize in the corresponding language code, if specified. The default value is "en-US". + billing_account (str): + Optional. The Billing Account to look up Offers for. Format: + accounts/{account_id}/billingAccounts/{billing_account_id}. + + This field is only relevant for multi-currency accounts. It + should be left empty for single currency accounts. 
""" cloud_identity_id: str = proto.Field( @@ -726,6 +737,10 @@ class ListTransferableOffersRequest(proto.Message): proto.STRING, number=7, ) + billing_account: str = proto.Field( + proto.STRING, + number=8, + ) class ListTransferableOffersResponse(proto.Message): @@ -1432,8 +1447,10 @@ class BillableSku(proto.Message): Attributes: sku (str): Resource name of Billable SKU. Format: + billableSkus/{sku}. Example: + billableSkus/6E1B-6634-470F". sku_display_name (str): Unique human readable name for the SKU. @@ -1441,6 +1458,7 @@ class BillableSku(proto.Message): Resource name of Service which contains Repricing SKU. Format: services/{service}. Example: + "services/B7D9-FDCB-15D8". service_display_name (str): Unique human readable name for the Service. @@ -1757,6 +1775,14 @@ class ChangeOfferRequest(proto.Message): `UUID `__ with the exception that zero UUID is not supported (``00000000-0000-0000-0000-000000000000``). + billing_account (str): + Optional. The billing account resource name + that is used to pay for this entitlement when + setting up billing on a trial subscription. + + This field is only relevant for multi-currency + accounts. It should be left empty for single + currency accounts. """ name: str = proto.Field( @@ -1780,6 +1806,10 @@ class ChangeOfferRequest(proto.Message): proto.STRING, number=6, ) + billing_account: str = proto.Field( + proto.STRING, + number=7, + ) class StartPaidServiceRequest(proto.Message): @@ -2381,12 +2411,20 @@ class CreateEntitlementPurchase(proto.Message): sku (str): Required. SKU that the result should be restricted to. Format: products/{product_id}/skus/{sku_id}. + billing_account (str): + Optional. Billing account that the result should be + restricted to. Format: + accounts/{account_id}/billingAccounts/{billing_account_id}. 
""" sku: str = proto.Field( proto.STRING, number=1, ) + billing_account: str = proto.Field( + proto.STRING, + number=2, + ) class ChangeOfferPurchase(proto.Message): r"""List Offers for ChangeOffer purchase. @@ -2399,6 +2437,14 @@ class ChangeOfferPurchase(proto.Message): Optional. Resource name of the new target SKU. Provide this SKU when upgrading or downgrading an entitlement. Format: products/{product_id}/skus/{sku_id} + billing_account (str): + Optional. Resource name of the new target Billing Account. + Provide this Billing Account when setting up billing for a + trial subscription. Format: + accounts/{account_id}/billingAccounts/{billing_account_id}. + + This field is only relevant for multi-currency accounts. It + should be left empty for single currency accounts. """ entitlement: str = proto.Field( @@ -2409,6 +2455,10 @@ class ChangeOfferPurchase(proto.Message): proto.STRING, number=2, ) + billing_account: str = proto.Field( + proto.STRING, + number=3, + ) create_entitlement_purchase: CreateEntitlementPurchase = proto.Field( proto.MESSAGE, @@ -2481,6 +2531,91 @@ class PurchasableOffer(proto.Message): ) +class QueryEligibleBillingAccountsRequest(proto.Message): + r"""Request message for QueryEligibleBillingAccounts. + + Attributes: + customer (str): + Required. The resource name of the customer to list eligible + billing accounts for. Format: + accounts/{account_id}/customers/{customer_id}. + skus (MutableSequence[str]): + Required. List of SKUs to list eligible billing accounts + for. At least one SKU is required. Format: + products/{product_id}/skus/{sku_id}. + """ + + customer: str = proto.Field( + proto.STRING, + number=1, + ) + skus: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class QueryEligibleBillingAccountsResponse(proto.Message): + r"""Response message for QueryEligibleBillingAccounts. 
+ + Attributes: + sku_purchase_groups (MutableSequence[google.cloud.channel_v1.types.SkuPurchaseGroup]): + List of SKU purchase groups where each group represents a + set of SKUs that must be purchased using the same billing + account. Each SKU from + [QueryEligibleBillingAccountsRequest.skus] will appear in + exactly one SKU group. + """ + + sku_purchase_groups: MutableSequence["SkuPurchaseGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SkuPurchaseGroup", + ) + + +class SkuPurchaseGroup(proto.Message): + r"""Represents a set of SKUs that must be purchased using the + same billing account. + + Attributes: + skus (MutableSequence[str]): + Resource names of the SKUs included in this group. Format: + products/{product_id}/skus/{sku_id}. + billing_account_purchase_infos (MutableSequence[google.cloud.channel_v1.types.BillingAccountPurchaseInfo]): + List of billing accounts that are eligible to + purhcase these SKUs. + """ + + skus: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + billing_account_purchase_infos: MutableSequence[ + "BillingAccountPurchaseInfo" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="BillingAccountPurchaseInfo", + ) + + +class BillingAccountPurchaseInfo(proto.Message): + r"""Represents a billing account that can be used to make a + purchase. + + Attributes: + billing_account (google.cloud.channel_v1.types.BillingAccount): + The billing account resource. + """ + + billing_account: billing_accounts.BillingAccount = proto.Field( + proto.MESSAGE, + number=1, + message=billing_accounts.BillingAccount, + ) + + class RegisterSubscriberRequest(proto.Message): r"""Request Message for RegisterSubscriber. 
diff --git a/packages/google-cloud-channel/noxfile.py b/packages/google-cloud-channel/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-channel/noxfile.py +++ b/packages/google-cloud-channel/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_async.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_async.py new file mode 100644 index 000000000000..25d857f595c3 --- /dev/null +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryEligibleBillingAccounts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-channel + + +# [START cloudchannel_v1_generated_CloudChannelService_QueryEligibleBillingAccounts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import channel_v1 + + +async def sample_query_eligible_billing_accounts(): + # Create a client + client = channel_v1.CloudChannelServiceAsyncClient() + + # Initialize request argument(s) + request = channel_v1.QueryEligibleBillingAccountsRequest( + customer="customer_value", + skus=['skus_value1', 'skus_value2'], + ) + + # Make the request + response = await client.query_eligible_billing_accounts(request=request) + + # Handle the response + print(response) + +# [END cloudchannel_v1_generated_CloudChannelService_QueryEligibleBillingAccounts_async] diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_sync.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_sync.py new file mode 100644 index 000000000000..63ab9f4e634a --- /dev/null +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryEligibleBillingAccounts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-channel + + +# [START cloudchannel_v1_generated_CloudChannelService_QueryEligibleBillingAccounts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import channel_v1 + + +def sample_query_eligible_billing_accounts(): + # Create a client + client = channel_v1.CloudChannelServiceClient() + + # Initialize request argument(s) + request = channel_v1.QueryEligibleBillingAccountsRequest( + customer="customer_value", + skus=['skus_value1', 'skus_value2'], + ) + + # Make the request + response = client.query_eligible_billing_accounts(request=request) + + # Handle the response + print(response) + +# [END cloudchannel_v1_generated_CloudChannelService_QueryEligibleBillingAccounts_sync] diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 4670b7c7da89..e274e9df8fbd 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "0.1.0" + "version": "1.16.0" }, "snippets": [ { @@ -6402,6 +6402,159 @@ ], "title": "cloudchannel_v1_generated_cloud_channel_service_provision_cloud_identity_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.channel_v1.CloudChannelServiceAsyncClient", + "shortName": "CloudChannelServiceAsyncClient" + }, + "fullName": "google.cloud.channel_v1.CloudChannelServiceAsyncClient.query_eligible_billing_accounts", + "method": { + "fullName": "google.cloud.channel.v1.CloudChannelService.QueryEligibleBillingAccounts", + "service": { + "fullName": "google.cloud.channel.v1.CloudChannelService", + "shortName": "CloudChannelService" + }, + 
"shortName": "QueryEligibleBillingAccounts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.channel_v1.types.QueryEligibleBillingAccountsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.channel_v1.types.QueryEligibleBillingAccountsResponse", + "shortName": "query_eligible_billing_accounts" + }, + "description": "Sample for QueryEligibleBillingAccounts", + "file": "cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudchannel_v1_generated_CloudChannelService_QueryEligibleBillingAccounts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.channel_v1.CloudChannelServiceClient", + "shortName": "CloudChannelServiceClient" + }, + "fullName": "google.cloud.channel_v1.CloudChannelServiceClient.query_eligible_billing_accounts", + "method": { + "fullName": "google.cloud.channel.v1.CloudChannelService.QueryEligibleBillingAccounts", + "service": { + "fullName": "google.cloud.channel.v1.CloudChannelService", + "shortName": "CloudChannelService" + }, + "shortName": "QueryEligibleBillingAccounts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.channel_v1.types.QueryEligibleBillingAccountsRequest" + 
}, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.channel_v1.types.QueryEligibleBillingAccountsResponse", + "shortName": "query_eligible_billing_accounts" + }, + "description": "Sample for QueryEligibleBillingAccounts", + "file": "cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudchannel_v1_generated_CloudChannelService_QueryEligibleBillingAccounts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudchannel_v1_generated_cloud_channel_service_query_eligible_billing_accounts_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-channel/scripts/decrypt-secrets.sh b/packages/google-cloud-channel/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-channel/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-channel/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py index b7de4f052304..c46878b79a71 100644 --- a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py +++ b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py @@ -41,7 +41,7 @@ class channelCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'activate_entitlement': ('name', 'request_id', ), 'cancel_entitlement': ('name', 'request_id', ), - 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', ), + 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', ), 'change_parameters': ('name', 'parameters', 'request_id', 'purchase_order_id', ), 'change_renewal_settings': ('name', 'renewal_settings', 'request_id', ), 'check_cloud_identity_accounts_exist': ('parent', 'domain', ), @@ -75,10 +75,11 @@ class channelCallTransformer(cst.CSTTransformer): 'list_sku_groups': ('parent', 'page_size', 'page_token', ), 'list_skus': ('parent', 'account', 'page_size', 'page_token', 'language_code', ), 'list_subscribers': ('account', 'page_size', 'page_token', ), - 'list_transferable_offers': ('parent', 'sku', 'cloud_identity_id', 'customer_name', 'page_size', 'page_token', 'language_code', ), + 'list_transferable_offers': ('parent', 'sku', 'cloud_identity_id', 'customer_name', 'page_size', 'page_token', 'language_code', 'billing_account', ), 'list_transferable_skus': ('parent', 'cloud_identity_id', 'customer_name', 'page_size', 'page_token', 'auth_token', 'language_code', ), 'lookup_offer': ('entitlement', ), 'provision_cloud_identity': ('customer', 'cloud_identity_info', 'user', 'validate_only', ), + 'query_eligible_billing_accounts': ('customer', 'skus', ), 'register_subscriber': ('account', 'service_account', ), 'run_report_job': ('name', 'date_range', 'filter', 'language_code', ), 'start_paid_service': ('name', 
'request_id', ), diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py index 46c1a97c1c3a..9dfeedd87a2c 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py @@ -39,7 +39,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.type import date_pb2 # type: ignore diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index 6b31f4b3f136..f42dedf7c1bb 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -39,7 +39,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -11652,6 +11652,161 @@ async def test_list_purchasable_offers_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + service.QueryEligibleBillingAccountsRequest, + dict, + ], +) +def test_query_eligible_billing_accounts(request_type, transport: 
str = "grpc"): + client = CloudChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_eligible_billing_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = service.QueryEligibleBillingAccountsResponse() + response = client.query_eligible_billing_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.QueryEligibleBillingAccountsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.QueryEligibleBillingAccountsResponse) + + +def test_query_eligible_billing_accounts_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_eligible_billing_accounts), "__call__" + ) as call: + client.query_eligible_billing_accounts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.QueryEligibleBillingAccountsRequest() + + +@pytest.mark.asyncio +async def test_query_eligible_billing_accounts_async( + transport: str = "grpc_asyncio", + request_type=service.QueryEligibleBillingAccountsRequest, +): + client = CloudChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_eligible_billing_accounts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.QueryEligibleBillingAccountsResponse() + ) + response = await client.query_eligible_billing_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.QueryEligibleBillingAccountsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.QueryEligibleBillingAccountsResponse) + + +@pytest.mark.asyncio +async def test_query_eligible_billing_accounts_async_from_dict(): + await test_query_eligible_billing_accounts_async(request_type=dict) + + +def test_query_eligible_billing_accounts_field_headers(): + client = CloudChannelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = service.QueryEligibleBillingAccountsRequest() + + request.customer = "customer_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_eligible_billing_accounts), "__call__" + ) as call: + call.return_value = service.QueryEligibleBillingAccountsResponse() + client.query_eligible_billing_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "customer=customer_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_eligible_billing_accounts_field_headers_async(): + client = CloudChannelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.QueryEligibleBillingAccountsRequest() + + request.customer = "customer_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_eligible_billing_accounts), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.QueryEligibleBillingAccountsResponse() + ) + await client.query_eligible_billing_accounts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "customer=customer_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -12949,6 +13104,7 @@ def test_cloud_channel_service_base_transport(): "list_offers", "list_purchasable_skus", "list_purchasable_offers", + "query_eligible_billing_accounts", "register_subscriber", "unregister_subscriber", "list_subscribers", @@ -13343,9 +13499,32 @@ def test_cloud_channel_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_channel_partner_link_path(): +def test_billing_account_path(): account = "squid" - channel_partner_link = "clam" + billing_account = "clam" + expected = "accounts/{account}/billingAccounts/{billing_account}".format( + account=account, + billing_account=billing_account, + ) + actual = CloudChannelServiceClient.billing_account_path(account, billing_account) + assert expected == actual + + +def test_parse_billing_account_path(): + expected = { + "account": "whelk", + "billing_account": "octopus", + } + path = CloudChannelServiceClient.billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudChannelServiceClient.parse_billing_account_path(path) + assert expected == actual + + +def test_channel_partner_link_path(): + account = "oyster" + channel_partner_link = "nudibranch" expected = "accounts/{account}/channelPartnerLinks/{channel_partner_link}".format( account=account, channel_partner_link=channel_partner_link, @@ -13358,8 +13537,8 @@ def test_channel_partner_link_path(): def test_parse_channel_partner_link_path(): expected = { - "account": "whelk", - "channel_partner_link": "octopus", + "account": "cuttlefish", + "channel_partner_link": "mussel", } path = CloudChannelServiceClient.channel_partner_link_path(**expected) @@ -13369,9 +13548,9 @@ def test_parse_channel_partner_link_path(): def test_channel_partner_repricing_config_path(): - account = "oyster" - channel_partner = "nudibranch" - channel_partner_repricing_config = "cuttlefish" + account = "winkle" + channel_partner = "nautilus" + channel_partner_repricing_config = "scallop" expected = "accounts/{account}/channelPartnerLinks/{channel_partner}/channelPartnerRepricingConfigs/{channel_partner_repricing_config}".format( account=account, channel_partner=channel_partner, @@ -13385,9 +13564,9 @@ def test_channel_partner_repricing_config_path(): def test_parse_channel_partner_repricing_config_path(): expected = { - "account": "mussel", - "channel_partner": "winkle", - "channel_partner_repricing_config": "nautilus", + "account": "abalone", + "channel_partner": "squid", + "channel_partner_repricing_config": "clam", } path = CloudChannelServiceClient.channel_partner_repricing_config_path(**expected) @@ -13397,8 +13576,8 @@ def test_parse_channel_partner_repricing_config_path(): def test_customer_path(): - account = "scallop" - customer = "abalone" + account = "whelk" + customer = "octopus" expected = "accounts/{account}/customers/{customer}".format( account=account, customer=customer, @@ -13409,8 +13588,8 @@ def test_customer_path(): def test_parse_customer_path(): expected = { - 
"account": "squid", - "customer": "clam", + "account": "oyster", + "customer": "nudibranch", } path = CloudChannelServiceClient.customer_path(**expected) @@ -13420,9 +13599,9 @@ def test_parse_customer_path(): def test_customer_repricing_config_path(): - account = "whelk" - customer = "octopus" - customer_repricing_config = "oyster" + account = "cuttlefish" + customer = "mussel" + customer_repricing_config = "winkle" expected = "accounts/{account}/customers/{customer}/customerRepricingConfigs/{customer_repricing_config}".format( account=account, customer=customer, @@ -13436,9 +13615,9 @@ def test_customer_repricing_config_path(): def test_parse_customer_repricing_config_path(): expected = { - "account": "nudibranch", - "customer": "cuttlefish", - "customer_repricing_config": "mussel", + "account": "nautilus", + "customer": "scallop", + "customer_repricing_config": "abalone", } path = CloudChannelServiceClient.customer_repricing_config_path(**expected) @@ -13448,9 +13627,9 @@ def test_parse_customer_repricing_config_path(): def test_entitlement_path(): - account = "winkle" - customer = "nautilus" - entitlement = "scallop" + account = "squid" + customer = "clam" + entitlement = "whelk" expected = ( "accounts/{account}/customers/{customer}/entitlements/{entitlement}".format( account=account, @@ -13464,9 +13643,9 @@ def test_entitlement_path(): def test_parse_entitlement_path(): expected = { - "account": "abalone", - "customer": "squid", - "entitlement": "clam", + "account": "octopus", + "customer": "oyster", + "entitlement": "nudibranch", } path = CloudChannelServiceClient.entitlement_path(**expected) @@ -13476,8 +13655,8 @@ def test_parse_entitlement_path(): def test_offer_path(): - account = "whelk" - offer = "octopus" + account = "cuttlefish" + offer = "mussel" expected = "accounts/{account}/offers/{offer}".format( account=account, offer=offer, @@ -13488,8 +13667,8 @@ def test_offer_path(): def test_parse_offer_path(): expected = { - "account": "oyster", - "offer": 
"nudibranch", + "account": "winkle", + "offer": "nautilus", } path = CloudChannelServiceClient.offer_path(**expected) @@ -13499,7 +13678,7 @@ def test_parse_offer_path(): def test_product_path(): - product = "cuttlefish" + product = "scallop" expected = "products/{product}".format( product=product, ) @@ -13509,7 +13688,7 @@ def test_product_path(): def test_parse_product_path(): expected = { - "product": "mussel", + "product": "abalone", } path = CloudChannelServiceClient.product_path(**expected) @@ -13519,8 +13698,8 @@ def test_parse_product_path(): def test_sku_path(): - product = "winkle" - sku = "nautilus" + product = "squid" + sku = "clam" expected = "products/{product}/skus/{sku}".format( product=product, sku=sku, @@ -13531,8 +13710,8 @@ def test_sku_path(): def test_parse_sku_path(): expected = { - "product": "scallop", - "sku": "abalone", + "product": "whelk", + "sku": "octopus", } path = CloudChannelServiceClient.sku_path(**expected) @@ -13542,8 +13721,8 @@ def test_parse_sku_path(): def test_sku_group_path(): - account = "squid" - sku_group = "clam" + account = "oyster" + sku_group = "nudibranch" expected = "accounts/{account}/skuGroups/{sku_group}".format( account=account, sku_group=sku_group, @@ -13554,8 +13733,8 @@ def test_sku_group_path(): def test_parse_sku_group_path(): expected = { - "account": "whelk", - "sku_group": "octopus", + "account": "cuttlefish", + "sku_group": "mussel", } path = CloudChannelServiceClient.sku_group_path(**expected) @@ -13565,7 +13744,7 @@ def test_parse_sku_group_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -13575,7 +13754,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "nautilus", } path = CloudChannelServiceClient.common_billing_account_path(**expected) @@ 
-13585,7 +13764,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -13595,7 +13774,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "abalone", } path = CloudChannelServiceClient.common_folder_path(**expected) @@ -13605,7 +13784,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -13615,7 +13794,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "clam", } path = CloudChannelServiceClient.common_organization_path(**expected) @@ -13625,7 +13804,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -13635,7 +13814,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "octopus", } path = CloudChannelServiceClient.common_project_path(**expected) @@ -13645,8 +13824,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -13657,8 +13836,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "cuttlefish", + "location": "mussel", } path = CloudChannelServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-confidentialcomputing/CHANGELOG.md b/packages/google-cloud-confidentialcomputing/CHANGELOG.md index 
c4b03de8431f..b11615eccd79 100644 --- a/packages/google-cloud-confidentialcomputing/CHANGELOG.md +++ b/packages/google-cloud-confidentialcomputing/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [0.4.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.4.0...google-cloud-confidentialcomputing-v0.4.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [0.4.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.3.0...google-cloud-confidentialcomputing-v0.4.0) (2023-08-10) + + +### Features + +* Add a new field `partial_errors` to `VerifyAttestationResponse` proto ([#11559](https://github.com/googleapis/google-cloud-python/issues/11559)) ([0c9b83a](https://github.com/googleapis/google-cloud-python/commit/0c9b83abfad6c66bc91008991b760118d1b70a01)) + +## [0.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.2.0...google-cloud-confidentialcomputing-v0.3.0) (2023-08-09) + + +### Features + +* Mark all fields `Optional` for `ContainerImageSignagure` proto ([#11547](https://github.com/googleapis/google-cloud-python/issues/11547)) ([f6a6175](https://github.com/googleapis/google-cloud-python/commit/f6a617587e1591c6b3aaa6b41a901f40b95b0f73)) + ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.1.1...google-cloud-confidentialcomputing-v0.2.0) (2023-08-03) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py index 64d2059a5a9e..d8d6845be344 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py +++ 
b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.4.1" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py index 64d2059a5a9e..d8d6845be344 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.4.1" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py index f92d4b429296..96f4393bffb9 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/async_client.py @@ -44,6 +44,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.cloud.confidentialcomputing_v1.types import service diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py 
b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py index 67da112b0aa8..69a5c64019cb 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py @@ -48,6 +48,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.cloud.confidentialcomputing_v1.types import service diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py index d07b38f48daa..2568f1720bad 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/types/service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -192,12 +193,21 @@ class VerifyAttestationResponse(proto.Message): Attributes: oidc_claims_token (str): Output only. Same as claims_token, but as a string. + partial_errors (MutableSequence[google.rpc.status_pb2.Status]): + Output only. A list of messages that carry + the partial error details related to + VerifyAttestation. 
""" oidc_claims_token: str = proto.Field( proto.STRING, number=2, ) + partial_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) class GcpCredentials(proto.Message): @@ -372,12 +382,12 @@ class ContainerImageSignature(proto.Message): Attributes: payload (bytes): - Required. The binary signature payload following the + Optional. The binary signature payload following the SimpleSigning format https://github.com/sigstore/cosign/blob/main/specs/SIGNATURE_SPEC.md#simple-signing. This payload includes the container image digest. signature (bytes): - Required. A signature over the payload. The container image + Optional. A signature over the payload. The container image digest is incorporated into the signature as follows: 1. Generate a SimpleSigning format payload that includes the @@ -387,11 +397,9 @@ class ContainerImageSignature(proto.Message): follows: ``Sign(sha256(SimpleSigningPayload(sha256(Image Manifest))))`` public_key (bytes): - Required. An associated public key used to - verify the signature. + Optional. Reserved for future use. sig_alg (google.cloud.confidentialcomputing_v1.types.SigningAlgorithm): - Required. The algorithm used to produce the - container image signature. + Optional. Reserved for future use. 
""" payload: bytes = proto.Field( diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json index 7bef1b887fdb..0f1e6a135765 100644 --- a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-confidentialcomputing", - "version": "0.2.0" + "version": "0.4.1" }, "snippets": [ { diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py index f52b60527d52..c6a020a06638 100644 --- a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py @@ -36,6 +36,7 @@ from google.oauth2 import service_account from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers diff --git a/packages/google-cloud-config/.OwlBot.yaml b/packages/google-cloud-config/.OwlBot.yaml new file mode 100644 index 000000000000..268e6a42da75 --- /dev/null +++ b/packages/google-cloud-config/.OwlBot.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/config/(.*)/.*-py + dest: /owl-bot-staging/google-cloud-config/$1 diff --git a/packages/google-cloud-config/.coveragerc b/packages/google-cloud-config/.coveragerc new file mode 100644 index 000000000000..01f0878da1cf --- /dev/null +++ b/packages/google-cloud-config/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/config/__init__.py + google/cloud/config/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-config/.flake8 b/packages/google-cloud-config/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-config/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. 
+ **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-config/.gitignore b/packages/google-cloud-config/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-config/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-config/.repo-metadata.json b/packages/google-cloud-config/.repo-metadata.json new file mode 100644 index 000000000000..44d7e978e8f1 --- /dev/null +++ b/packages/google-cloud-config/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "config", + "name_pretty": "Infrastructure Manager API", + "api_description": "Infrastructure Manager API", + "product_documentation": "https://cloud.google.com/infrastructure-manager/docs/overview", + "client_documentation": "https://cloud.google.com/python/docs/reference/config/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=536700", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-config", + "api_id": "config.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "config" +} diff --git a/packages/google-cloud-config/CHANGELOG.md b/packages/google-cloud-config/CHANGELOG.md new file mode 100644 index 000000000000..2163ad74ef03 --- /dev/null +++ b/packages/google-cloud-config/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.0...google-cloud-config-v0.1.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## 0.1.0 (2023-08-31) + + +### Features + +* add initial files for google.cloud.config.v1 ([#11608](https://github.com/googleapis/google-cloud-python/issues/11608)) ([c53680f](https://github.com/googleapis/google-cloud-python/commit/c53680f647738b8fc3f9cce86455dd3f195e4ff6)) + +## Changelog diff --git a/packages/google-cloud-config/CODE_OF_CONDUCT.md b/packages/google-cloud-config/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ 
b/packages/google-cloud-config/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. 
+ +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-config/CONTRIBUTING.rst b/packages/google-cloud-config/CONTRIBUTING.rst new file mode 100644 index 000000000000..00f674974164 --- /dev/null +++ b/packages/google-cloud-config/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. 
+ +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? 
+***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. 
+ + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-config + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-config/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-config/LICENSE b/packages/google-cloud-config/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-config/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-config/MANIFEST.in b/packages/google-cloud-config/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-config/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-config/README.rst b/packages/google-cloud-config/README.rst new file mode 100644 index 000000000000..b546c46a8757 --- /dev/null +++ b/packages/google-cloud-config/README.rst @@ -0,0 +1,108 @@ +Python Client for Infrastructure Manager API +============================================ + +|preview| |pypi| |versions| + +`Infrastructure Manager API`_: Infrastructure Manager API + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-config.svg + :target: https://pypi.org/project/google-cloud-config/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-config.svg + :target: https://pypi.org/project/google-cloud-config/ +.. _Infrastructure Manager API: https://cloud.google.com/infrastructure-manager/docs/overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/config/latest +.. _Product Documentation: https://cloud.google.com/infrastructure-manager/docs/overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Infrastructure Manager API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Enable the Infrastructure Manager API.: https://cloud.google.com/infrastructure-manager/docs/overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-config + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-config + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Infrastructure Manager API + to see other available methods on the client. +- Read the `Infrastructure Manager API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Infrastructure Manager API Product documentation: https://cloud.google.com/infrastructure-manager/docs/overview +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-config/docs/CHANGELOG.md b/packages/google-cloud-config/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-config/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-config/docs/README.rst b/packages/google-cloud-config/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-config/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-config/docs/_static/custom.css b/packages/google-cloud-config/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-config/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-config/docs/_templates/layout.html b/packages/google-cloud-config/docs/_templates/layout.html new file mode 100644 
index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-config/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-config/docs/conf.py b/packages/google-cloud-config/docs/conf.py new file mode 100644 index 000000000000..d6084649bc4d --- /dev/null +++ b/packages/google-cloud-config/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-config documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-config" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-config", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". 
+# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-config-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-config.tex", + "google-cloud-config Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-config", + "google-cloud-config Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-config", + "google-cloud-config Documentation", + author, + "google-cloud-config", + "google-cloud-config Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-config/docs/config_v1/config.rst b/packages/google-cloud-config/docs/config_v1/config.rst new file mode 100644 index 000000000000..7ad09d4ba002 --- /dev/null +++ b/packages/google-cloud-config/docs/config_v1/config.rst @@ -0,0 +1,10 @@ +Config +------------------------ + +.. automodule:: google.cloud.config_v1.services.config + :members: + :inherited-members: + +.. automodule:: google.cloud.config_v1.services.config.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-config/docs/config_v1/services.rst b/packages/google-cloud-config/docs/config_v1/services.rst new file mode 100644 index 000000000000..51dabd45ae67 --- /dev/null +++ b/packages/google-cloud-config/docs/config_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Config v1 API +======================================= +.. 
toctree:: + :maxdepth: 2 + + config diff --git a/packages/google-cloud-config/docs/config_v1/types.rst b/packages/google-cloud-config/docs/config_v1/types.rst new file mode 100644 index 000000000000..33bb93d6c861 --- /dev/null +++ b/packages/google-cloud-config/docs/config_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Config v1 API +==================================== + +.. automodule:: google.cloud.config_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-config/docs/index.rst b/packages/google-cloud-config/docs/index.rst new file mode 100644 index 000000000000..4ed8af0d181a --- /dev/null +++ b/packages/google-cloud-config/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + config_v1/services + config_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-config`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-config/docs/multiprocessing.rst b/packages/google-cloud-config/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-config/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
diff --git a/packages/google-cloud-config/google/cloud/config/__init__.py b/packages/google-cloud-config/google/cloud/config/__init__.py new file mode 100644 index 000000000000..b86f5ddc9ec0 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.config import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.config_v1.services.config.async_client import ConfigAsyncClient +from google.cloud.config_v1.services.config.client import ConfigClient +from google.cloud.config_v1.types.config import ( + ApplyResults, + CreateDeploymentRequest, + DeleteDeploymentRequest, + DeleteStatefileRequest, + Deployment, + DeploymentOperationMetadata, + ExportDeploymentStatefileRequest, + ExportLockInfoRequest, + ExportRevisionStatefileRequest, + GetDeploymentRequest, + GetResourceRequest, + GetRevisionRequest, + GitSource, + ImportStatefileRequest, + ListDeploymentsRequest, + ListDeploymentsResponse, + ListResourcesRequest, + ListResourcesResponse, + ListRevisionsRequest, + ListRevisionsResponse, + LockDeploymentRequest, + LockInfo, + OperationMetadata, + Resource, + ResourceCAIInfo, + ResourceTerraformInfo, + Revision, + Statefile, + TerraformBlueprint, + TerraformError, + TerraformOutput, + TerraformVariable, + UnlockDeploymentRequest, + UpdateDeploymentRequest, +) + 
+__all__ = ( + "ConfigClient", + "ConfigAsyncClient", + "ApplyResults", + "CreateDeploymentRequest", + "DeleteDeploymentRequest", + "DeleteStatefileRequest", + "Deployment", + "DeploymentOperationMetadata", + "ExportDeploymentStatefileRequest", + "ExportLockInfoRequest", + "ExportRevisionStatefileRequest", + "GetDeploymentRequest", + "GetResourceRequest", + "GetRevisionRequest", + "GitSource", + "ImportStatefileRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListResourcesRequest", + "ListResourcesResponse", + "ListRevisionsRequest", + "ListRevisionsResponse", + "LockDeploymentRequest", + "LockInfo", + "OperationMetadata", + "Resource", + "ResourceCAIInfo", + "ResourceTerraformInfo", + "Revision", + "Statefile", + "TerraformBlueprint", + "TerraformError", + "TerraformOutput", + "TerraformVariable", + "UnlockDeploymentRequest", + "UpdateDeploymentRequest", +) diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py new file mode 100644 index 000000000000..123d60293175 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config/py.typed b/packages/google-cloud-config/google/cloud/config/py.typed new file mode 100644 index 000000000000..f4fe63d2606c --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-config package uses inline types. diff --git a/packages/google-cloud-config/google/cloud/config_v1/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/__init__.py new file mode 100644 index 000000000000..a407a3aa0ff8 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/__init__.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.config_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.config import ConfigAsyncClient, ConfigClient +from .types.config import ( + ApplyResults, + CreateDeploymentRequest, + DeleteDeploymentRequest, + DeleteStatefileRequest, + Deployment, + DeploymentOperationMetadata, + ExportDeploymentStatefileRequest, + ExportLockInfoRequest, + ExportRevisionStatefileRequest, + GetDeploymentRequest, + GetResourceRequest, + GetRevisionRequest, + GitSource, + ImportStatefileRequest, + ListDeploymentsRequest, + ListDeploymentsResponse, + ListResourcesRequest, + ListResourcesResponse, + ListRevisionsRequest, + ListRevisionsResponse, + LockDeploymentRequest, + LockInfo, + OperationMetadata, + Resource, + ResourceCAIInfo, + ResourceTerraformInfo, + Revision, + Statefile, + TerraformBlueprint, + TerraformError, + TerraformOutput, + TerraformVariable, + UnlockDeploymentRequest, + UpdateDeploymentRequest, +) + +__all__ = ( + "ConfigAsyncClient", + "ApplyResults", + "ConfigClient", + "CreateDeploymentRequest", + "DeleteDeploymentRequest", + "DeleteStatefileRequest", + "Deployment", + "DeploymentOperationMetadata", + "ExportDeploymentStatefileRequest", + "ExportLockInfoRequest", + "ExportRevisionStatefileRequest", + "GetDeploymentRequest", + "GetResourceRequest", + "GetRevisionRequest", + "GitSource", + "ImportStatefileRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListResourcesRequest", + "ListResourcesResponse", + "ListRevisionsRequest", + "ListRevisionsResponse", + "LockDeploymentRequest", + "LockInfo", + "OperationMetadata", + "Resource", + "ResourceCAIInfo", + "ResourceTerraformInfo", + "Revision", + "Statefile", + "TerraformBlueprint", + "TerraformError", + "TerraformOutput", + "TerraformVariable", + "UnlockDeploymentRequest", + "UpdateDeploymentRequest", +) diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json 
b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json new file mode 100644 index 000000000000..65de7b78a0aa --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json @@ -0,0 +1,268 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.config_v1", + "protoPackage": "google.cloud.config.v1", + "schema": "1.0", + "services": { + "Config": { + "clients": { + "grpc": { + "libraryClient": "ConfigClient", + "rpcs": { + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "DeleteStatefile": { + "methods": [ + "delete_statefile" + ] + }, + "ExportDeploymentStatefile": { + "methods": [ + "export_deployment_statefile" + ] + }, + "ExportLockInfo": { + "methods": [ + "export_lock_info" + ] + }, + "ExportRevisionStatefile": { + "methods": [ + "export_revision_statefile" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetResource": { + "methods": [ + "get_resource" + ] + }, + "GetRevision": { + "methods": [ + "get_revision" + ] + }, + "ImportStatefile": { + "methods": [ + "import_statefile" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ListResources": { + "methods": [ + "list_resources" + ] + }, + "ListRevisions": { + "methods": [ + "list_revisions" + ] + }, + "LockDeployment": { + "methods": [ + "lock_deployment" + ] + }, + "UnlockDeployment": { + "methods": [ + "unlock_deployment" + ] + }, + "UpdateDeployment": { + "methods": [ + "update_deployment" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConfigAsyncClient", + "rpcs": { + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "DeleteStatefile": { + "methods": [ + "delete_statefile" + ] + }, + "ExportDeploymentStatefile": { + 
"methods": [ + "export_deployment_statefile" + ] + }, + "ExportLockInfo": { + "methods": [ + "export_lock_info" + ] + }, + "ExportRevisionStatefile": { + "methods": [ + "export_revision_statefile" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetResource": { + "methods": [ + "get_resource" + ] + }, + "GetRevision": { + "methods": [ + "get_revision" + ] + }, + "ImportStatefile": { + "methods": [ + "import_statefile" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ListResources": { + "methods": [ + "list_resources" + ] + }, + "ListRevisions": { + "methods": [ + "list_revisions" + ] + }, + "LockDeployment": { + "methods": [ + "lock_deployment" + ] + }, + "UnlockDeployment": { + "methods": [ + "unlock_deployment" + ] + }, + "UpdateDeployment": { + "methods": [ + "update_deployment" + ] + } + } + }, + "rest": { + "libraryClient": "ConfigClient", + "rpcs": { + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "DeleteStatefile": { + "methods": [ + "delete_statefile" + ] + }, + "ExportDeploymentStatefile": { + "methods": [ + "export_deployment_statefile" + ] + }, + "ExportLockInfo": { + "methods": [ + "export_lock_info" + ] + }, + "ExportRevisionStatefile": { + "methods": [ + "export_revision_statefile" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetResource": { + "methods": [ + "get_resource" + ] + }, + "GetRevision": { + "methods": [ + "get_revision" + ] + }, + "ImportStatefile": { + "methods": [ + "import_statefile" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ListResources": { + "methods": [ + "list_resources" + ] + }, + "ListRevisions": { + "methods": [ + "list_revisions" + ] + }, + "LockDeployment": { + "methods": [ + "lock_deployment" + ] + }, + "UnlockDeployment": { + "methods": [ + "unlock_deployment" + ] + }, + "UpdateDeployment": { + "methods": [ 
+ "update_deployment" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py new file mode 100644 index 000000000000..123d60293175 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/py.typed b/packages/google-cloud-config/google/cloud/config_v1/py.typed new file mode 100644 index 000000000000..f4fe63d2606c --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-config package uses inline types. diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/__init__.py new file mode 100644 index 000000000000..da4d9712072a --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ConfigAsyncClient +from .client import ConfigClient + +__all__ = ( + "ConfigClient", + "ConfigAsyncClient", +) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py new file mode 100644 index 000000000000..d2b13a5861ec --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -0,0 +1,2637 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.config_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.config_v1.services.config import pagers +from google.cloud.config_v1.types import config + +from .client import ConfigClient +from .transports.base import DEFAULT_CLIENT_INFO, ConfigTransport +from .transports.grpc_asyncio import ConfigGrpcAsyncIOTransport + + +class ConfigAsyncClient: + """Infrastructure Manager is a managed service that automates + the deployment and management of Google Cloud infrastructure + resources. 
+ """ + + _client: ConfigClient + + DEFAULT_ENDPOINT = ConfigClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConfigClient.DEFAULT_MTLS_ENDPOINT + + deployment_path = staticmethod(ConfigClient.deployment_path) + parse_deployment_path = staticmethod(ConfigClient.parse_deployment_path) + resource_path = staticmethod(ConfigClient.resource_path) + parse_resource_path = staticmethod(ConfigClient.parse_resource_path) + revision_path = staticmethod(ConfigClient.revision_path) + parse_revision_path = staticmethod(ConfigClient.parse_revision_path) + service_account_path = staticmethod(ConfigClient.service_account_path) + parse_service_account_path = staticmethod(ConfigClient.parse_service_account_path) + worker_pool_path = staticmethod(ConfigClient.worker_pool_path) + parse_worker_pool_path = staticmethod(ConfigClient.parse_worker_pool_path) + common_billing_account_path = staticmethod(ConfigClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + ConfigClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ConfigClient.common_folder_path) + parse_common_folder_path = staticmethod(ConfigClient.parse_common_folder_path) + common_organization_path = staticmethod(ConfigClient.common_organization_path) + parse_common_organization_path = staticmethod( + ConfigClient.parse_common_organization_path + ) + common_project_path = staticmethod(ConfigClient.common_project_path) + parse_common_project_path = staticmethod(ConfigClient.parse_common_project_path) + common_location_path = staticmethod(ConfigClient.common_location_path) + parse_common_location_path = staticmethod(ConfigClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigAsyncClient: The constructed client. + """ + return ConfigClient.from_service_account_info.__func__(ConfigAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigAsyncClient: The constructed client. + """ + return ConfigClient.from_service_account_file.__func__(ConfigAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ConfigClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ConfigTransport: + """Returns the transport used by the client instance. + + Returns: + ConfigTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ConfigClient).get_transport_class, type(ConfigClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ConfigTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the config client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConfigTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ConfigClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_deployments( + self, + request: Optional[Union[config.ListDeploymentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentsAsyncPager: + r"""Lists [Deployment][google.cloud.config.v1.Deployment]s in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_deployments(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListDeploymentsRequest, dict]]): + The request object. + parent (:class:`str`): + Required. The parent in whose context the Deployments + are listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListDeploymentsAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.ListDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deployments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deployment( + self, + request: Optional[Union[config.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Deployment: + r"""Gets details about a + [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetDeploymentRequest, dict]]): + The request object. + name (:class:`str`): + Required. The name of the deployment. Format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Deployment: + A Deployment is a group of resources + and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.GetDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_deployment( + self, + request: Optional[Union[config.CreateDeploymentRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment: Optional[config.Deployment] = None, + deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_create_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreateDeploymentRequest( + parent="parent_value", + deployment_id="deployment_id_value", + deployment=deployment, + ) + + # Make the request + operation = client.create_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.CreateDeploymentRequest, dict]]): + The request object. + parent (:class:`str`): + Required. The parent in whose context the Deployment is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment (:class:`google.cloud.config_v1.types.Deployment`): + Required. + [Deployment][google.cloud.config.v1.Deployment] resource + to be created. + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_id (:class:`str`): + Required. The Deployment ID. + This corresponds to the ``deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deployment, deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.CreateDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment is not None: + request.deployment = deployment + if deployment_id is not None: + request.deployment_id = deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_deployment( + self, + request: Optional[Union[config.UpdateDeploymentRequest, dict]] = None, + *, + deployment: Optional[config.Deployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_update_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + operation = client.update_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.UpdateDeploymentRequest, dict]]): + The request object. + deployment (:class:`google.cloud.config_v1.types.Deployment`): + Required. + [Deployment][google.cloud.config.v1.Deployment] to + update. + + The deployment's ``name`` field is used to identify the + resource to be updated. 
Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask used to specify the fields to be + overwritten in the Deployment resource by the update. + + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.UpdateDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if deployment is not None: + request.deployment = deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment.name", request.deployment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_deployment( + self, + request: Optional[Union[config.DeleteDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_delete_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.DeleteDeploymentRequest, dict]]): + The request object. + name (:class:`str`): + Required. The name of the Deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = config.DeleteDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_revisions( + self, + request: Optional[Union[config.ListRevisionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRevisionsAsyncPager: + r"""Lists [Revision][google.cloud.config.v1.Revision]s of a + deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_revisions(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListRevisionsRequest, dict]]): + The request object. A request to list Revisions passed to + a 'ListRevisions' call. + parent (:class:`str`): + Required. The parent in whose context the Revisions are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListRevisionsAsyncPager: + A response to a 'ListRevisions' call. + Contains a list of Revisions. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = config.ListRevisionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_revisions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListRevisionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_revision( + self, + request: Optional[Union[config.GetRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Revision: + r"""Gets details about a + [Revision][google.cloud.config.v1.Revision]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_revision(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_revision(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetRevisionRequest, dict]]): + The request object. A request to get a Revision from a + 'GetRevision' call. + name (:class:`str`): + Required. The name of the Revision in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Revision: + A child resource of a Deployment + generated by a 'CreateDeployment' or + 'UpdateDeployment' call. Each Revision + contains metadata pertaining to a + snapshot of a particular Deployment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = config.GetRevisionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_revision, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_resource( + self, + request: Optional[Union[config.GetResourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Resource: + r"""Gets details about a [Resource][google.cloud.config.v1.Resource] + deployed by Infra Manager. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_resource(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetResourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetResourceRequest, dict]]): + The request object. A request to get a Resource from a + 'GetResource' call. + name (:class:`str`): + Required. The name of the Resource in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}/resource/{resource}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Resource: + Resource represents a Google Cloud + Platform resource actuated by IM. + Resources are child resources of + Revisions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.GetResourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_resource, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_resources( + self, + request: Optional[Union[config.ListResourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListResourcesAsyncPager: + r"""Lists [Resource][google.cloud.config.v1.Resource]s in a given + revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_resources(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListResourcesRequest, dict]]): + The request object. A request to list Resources passed to + a 'ListResources' call. + parent (:class:`str`): + Required. The parent in whose context the Resources are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListResourcesAsyncPager: + A response to a 'ListResources' call. + Contains a list of Resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.ListResourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_resources, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListResourcesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def export_deployment_statefile( + self, + request: Optional[Union[config.ExportDeploymentStatefileRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Exports Terraform state file from a given deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_export_deployment_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportDeploymentStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_deployment_statefile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ExportDeploymentStatefileRequest, dict]]): + The request object. A request to export a state file + passed to a 'ExportDeploymentStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Statefile: + Contains info about a Terraform state + file + + """ + # Create or coerce a protobuf request object. + request = config.ExportDeploymentStatefileRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_deployment_statefile, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def export_revision_statefile( + self, + request: Optional[Union[config.ExportRevisionStatefileRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Exports Terraform state file from a given revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_export_revision_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportRevisionStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_revision_statefile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ExportRevisionStatefileRequest, dict]]): + The request object. A request to export a state file + passed to a 'ExportRevisionStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Statefile: + Contains info about a Terraform state + file + + """ + # Create or coerce a protobuf request object. + request = config.ExportRevisionStatefileRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_revision_statefile, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def import_statefile( + self, + request: Optional[Union[config.ImportStatefileRequest, dict]] = None, + *, + parent: Optional[str] = None, + lock_id: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Imports Terraform state file in a given deployment. + The state file does not take effect until the Deployment + has been unlocked. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_import_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ImportStatefileRequest( + parent="parent_value", + lock_id=725, + ) + + # Make the request + response = await client.import_statefile(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ImportStatefileRequest, dict]]): + The request object. 
A request to import a state file + passed to a 'ImportStatefile' call. + parent (:class:`str`): + Required. The parent in whose context the statefile is + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lock_id (:class:`int`): + Required. Lock ID of the lock file to + verify that the user who is importing + the state file previously locked the + Deployment. + + This corresponds to the ``lock_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Statefile: + Contains info about a Terraform state + file + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lock_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.ImportStatefileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lock_id is not None: + request.lock_id = lock_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_statefile, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_statefile( + self, + request: Optional[Union[config.DeleteStatefileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes Terraform state file in a given deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_delete_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeleteStatefileRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + await client.delete_statefile(request=request) + + Args: + request (Optional[Union[google.cloud.config_v1.types.DeleteStatefileRequest, dict]]): + The request object. A request to delete a state file + passed to a 'DeleteStatefile' call. + name (:class:`str`): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.DeleteStatefileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_statefile, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def lock_deployment( + self, + request: Optional[Union[config.LockDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Locks a deployment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_lock_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.LockDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.lock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.LockDeploymentRequest, dict]]): + The request object. A request to lock a deployment passed + to a 'LockDeployment' call. + name (:class:`str`): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.LockDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lock_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def unlock_deployment( + self, + request: Optional[Union[config.UnlockDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + lock_id: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Unlocks a locked deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_unlock_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.UnlockDeploymentRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + operation = client.unlock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.UnlockDeploymentRequest, dict]]): + The request object. A request to unlock a state file + passed to a 'UnlockDeployment' call. + name (:class:`str`): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lock_id (:class:`int`): + Required. Lock ID of the lock file to + be unlocked. + + This corresponds to the ``lock_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, lock_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.UnlockDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if lock_id is not None: + request.lock_id = lock_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.unlock_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def export_lock_info( + self, + request: Optional[Union[config.ExportLockInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.LockInfo: + r"""Exports the lock info on a locked deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_export_lock_info(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportLockInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.export_lock_info(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ExportLockInfoRequest, dict]]): + The request object. A request to get a state file lock + info passed to a 'ExportLockInfo' call. + name (:class:`str`): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.config_v1.types.LockInfo: + Details about the lock which locked + the deployment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.ExportLockInfoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_lock_info, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ConfigAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConfigAsyncClient",) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py new file mode 100644 index 000000000000..d3ff68354028 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -0,0 +1,2975 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.config_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.config_v1.services.config import pagers +from google.cloud.config_v1.types import config + +from .transports.base import DEFAULT_CLIENT_INFO, ConfigTransport +from .transports.grpc import ConfigGrpcTransport +from .transports.grpc_asyncio import ConfigGrpcAsyncIOTransport +from .transports.rest import ConfigRestTransport + + +class ConfigClientMeta(type): + """Metaclass for the Config client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigTransport]] + _transport_registry["grpc"] = ConfigGrpcTransport + _transport_registry["grpc_asyncio"] = ConfigGrpcAsyncIOTransport + _transport_registry["rest"] = ConfigRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ConfigTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ConfigClient(metaclass=ConfigClientMeta): + """Infrastructure Manager is a managed service that automates + the deployment and management of Google Cloud infrastructure + resources. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "config.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigTransport: + """Returns the transport used by the client instance. + + Returns: + ConfigTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def deployment_path( + project: str, + location: str, + deployment: str, + ) -> str: + """Returns a fully-qualified deployment string.""" + return ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, + ) + ) + + @staticmethod + def parse_deployment_path(path: str) -> Dict[str, str]: + """Parses a deployment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deployments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def resource_path( + project: str, + location: str, + deployment: str, + revision: str, + resource: str, + ) -> str: + """Returns a fully-qualified resource string.""" + return "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}".format( + project=project, + location=location, + deployment=deployment, + revision=revision, + resource=resource, + ) + + @staticmethod + def parse_resource_path(path: str) -> Dict[str, str]: + """Parses a resource path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deployments/(?P.+?)/revisions/(?P.+?)/resources/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def revision_path( + project: str, + location: str, + deployment: str, + revision: str, + ) -> str: + """Returns a fully-qualified revision string.""" + return "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}".format( + project=project, + location=location, + deployment=deployment, + revision=revision, + ) + + @staticmethod + def parse_revision_path(path: str) -> Dict[str, str]: + """Parses a revision path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deployments/(?P.+?)/revisions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod 
+ def service_account_path( + project: str, + service_account: str, + ) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str, str]: + """Parses a service_account path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def worker_pool_path( + project: str, + location: str, + worker_pool: str, + ) -> str: + """Returns a fully-qualified worker_pool string.""" + return ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) + ) + + @staticmethod + def parse_worker_pool_path(path: str) -> Dict[str, str]: + """Parses a worker_pool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workerPools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConfigTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the config client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ConfigTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConfigTransport): + # transport is a ConfigTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_deployments( + self, + request: Optional[Union[config.ListDeploymentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentsPager: + r"""Lists [Deployment][google.cloud.config.v1.Deployment]s in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_deployments(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListDeploymentsRequest, dict]): + The request object. + parent (str): + Required. The parent in whose context the Deployments + are listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListDeploymentsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ListDeploymentsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ListDeploymentsRequest): + request = config.ListDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deployments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deployment( + self, + request: Optional[Union[config.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Deployment: + r"""Gets details about a + [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetDeploymentRequest, dict]): + The request object. + name (str): + Required. The name of the deployment. Format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Deployment: + A Deployment is a group of resources + and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.GetDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, config.GetDeploymentRequest): + request = config.GetDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_deployment( + self, + request: Optional[Union[config.CreateDeploymentRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment: Optional[config.Deployment] = None, + deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_create_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreateDeploymentRequest( + parent="parent_value", + deployment_id="deployment_id_value", + deployment=deployment, + ) + + # Make the request + operation = client.create_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.CreateDeploymentRequest, dict]): + The request object. + parent (str): + Required. The parent in whose context the Deployment is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment (google.cloud.config_v1.types.Deployment): + Required. + [Deployment][google.cloud.config.v1.Deployment] resource + to be created. + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_id (str): + Required. The Deployment ID. + This corresponds to the ``deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deployment, deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.CreateDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.CreateDeploymentRequest): + request = config.CreateDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment is not None: + request.deployment = deployment + if deployment_id is not None: + request.deployment_id = deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_deployment( + self, + request: Optional[Union[config.UpdateDeploymentRequest, dict]] = None, + *, + deployment: Optional[config.Deployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_update_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + operation = client.update_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.UpdateDeploymentRequest, dict]): + The request object. + deployment (google.cloud.config_v1.types.Deployment): + Required. + [Deployment][google.cloud.config.v1.Deployment] to + update. + + The deployment's ``name`` field is used to identify the + resource to be updated. 
Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask used to specify the fields to be + overwritten in the Deployment resource by the update. + + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.UpdateDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, config.UpdateDeploymentRequest): + request = config.UpdateDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment is not None: + request.deployment = deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment.name", request.deployment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_deployment( + self, + request: Optional[Union[config.DeleteDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Deployment][google.cloud.config.v1.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_delete_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.DeleteDeploymentRequest, dict]): + The request object. + name (str): + Required. The name of the Deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.DeleteDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.DeleteDeploymentRequest): + request = config.DeleteDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_revisions( + self, + request: Optional[Union[config.ListRevisionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListRevisionsPager: + r"""Lists [Revision][google.cloud.config.v1.Revision]s of a + deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_revisions(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListRevisionsRequest, dict]): + The request object. A request to list Revisions passed to + a 'ListRevisions' call. + parent (str): + Required. The parent in whose context the Revisions are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListRevisionsPager: + A response to a 'ListRevisions' call. + Contains a list of Revisions. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ListRevisionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ListRevisionsRequest): + request = config.ListRevisionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_revisions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListRevisionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_revision( + self, + request: Optional[Union[config.GetRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Revision: + r"""Gets details about a + [Revision][google.cloud.config.v1.Revision]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_revision(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.get_revision(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetRevisionRequest, dict]): + The request object. A request to get a Revision from a + 'GetRevision' call. + name (str): + Required. The name of the Revision in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Revision: + A child resource of a Deployment + generated by a 'CreateDeployment' or + 'UpdateDeployment' call. Each Revision + contains metadata pertaining to a + snapshot of a particular Deployment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.GetRevisionRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.GetRevisionRequest): + request = config.GetRevisionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_revision] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_resource( + self, + request: Optional[Union[config.GetResourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Resource: + r"""Gets details about a [Resource][google.cloud.config.v1.Resource] + deployed by Infra Manager. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_resource(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetResourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_resource(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetResourceRequest, dict]): + The request object. A request to get a Resource from a + 'GetResource' call. + name (str): + Required. The name of the Resource in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}/resource/{resource}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Resource: + Resource represents a Google Cloud + Platform resource actuated by IM. + Resources are child resources of + Revisions. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.GetResourceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.GetResourceRequest): + request = config.GetResourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_resource] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_resources( + self, + request: Optional[Union[config.ListResourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListResourcesPager: + r"""Lists [Resource][google.cloud.config.v1.Resource]s in a given + revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_resources(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListResourcesRequest, dict]): + The request object. A request to list Resources passed to + a 'ListResources' call. + parent (str): + Required. The parent in whose context the Resources are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListResourcesPager: + A response to a 'ListResources' call. + Contains a list of Resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ListResourcesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ListResourcesRequest): + request = config.ListResourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListResourcesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def export_deployment_statefile( + self, + request: Optional[Union[config.ExportDeploymentStatefileRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Exports Terraform state file from a given deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_export_deployment_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportDeploymentStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_deployment_statefile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ExportDeploymentStatefileRequest, dict]): + The request object. A request to export a state file + passed to a 'ExportDeploymentStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Statefile: + Contains info about a Terraform state + file + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a config.ExportDeploymentStatefileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ExportDeploymentStatefileRequest): + request = config.ExportDeploymentStatefileRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.export_deployment_statefile + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def export_revision_statefile( + self, + request: Optional[Union[config.ExportRevisionStatefileRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Exports Terraform state file from a given revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_export_revision_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportRevisionStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_revision_statefile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ExportRevisionStatefileRequest, dict]): + The request object. A request to export a state file + passed to a 'ExportRevisionStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Statefile: + Contains info about a Terraform state + file + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a config.ExportRevisionStatefileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ExportRevisionStatefileRequest): + request = config.ExportRevisionStatefileRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.export_revision_statefile + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def import_statefile( + self, + request: Optional[Union[config.ImportStatefileRequest, dict]] = None, + *, + parent: Optional[str] = None, + lock_id: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Imports Terraform state file in a given deployment. + The state file does not take effect until the Deployment + has been unlocked. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_import_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ImportStatefileRequest( + parent="parent_value", + lock_id=725, + ) + + # Make the request + response = client.import_statefile(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ImportStatefileRequest, dict]): + The request object. A request to import a state file + passed to a 'ImportStatefile' call. + parent (str): + Required. The parent in whose context the statefile is + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lock_id (int): + Required. Lock ID of the lock file to + verify that the user who is importing + the state file previously locked the + Deployment. + + This corresponds to the ``lock_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Statefile: + Contains info about a Terraform state + file + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, lock_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ImportStatefileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ImportStatefileRequest): + request = config.ImportStatefileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lock_id is not None: + request.lock_id = lock_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_statefile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_statefile( + self, + request: Optional[Union[config.DeleteStatefileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes Terraform state file in a given deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_delete_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeleteStatefileRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + client.delete_statefile(request=request) + + Args: + request (Union[google.cloud.config_v1.types.DeleteStatefileRequest, dict]): + The request object. A request to delete a state file + passed to a 'DeleteStatefile' call. + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.DeleteStatefileRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, config.DeleteStatefileRequest): + request = config.DeleteStatefileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_statefile] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def lock_deployment( + self, + request: Optional[Union[config.LockDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Locks a deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_lock_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.LockDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.lock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.LockDeploymentRequest, dict]): + The request object. A request to lock a deployment passed + to a 'LockDeployment' call. + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.LockDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.LockDeploymentRequest): + request = config.LockDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lock_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def unlock_deployment( + self, + request: Optional[Union[config.UnlockDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + lock_id: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Unlocks a locked deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_unlock_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.UnlockDeploymentRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + operation = client.unlock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.UnlockDeploymentRequest, dict]): + The request object. A request to unlock a state file + passed to a 'UnlockDeployment' call. + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lock_id (int): + Required. Lock ID of the lock file to + be unlocked. + + This corresponds to the ``lock_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Deployment` A Deployment is a group of resources and configs managed and provisioned by + Infra Manager. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, lock_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.UnlockDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.UnlockDeploymentRequest): + request = config.UnlockDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if lock_id is not None: + request.lock_id = lock_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.unlock_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Deployment, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def export_lock_info( + self, + request: Optional[Union[config.ExportLockInfoRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.LockInfo: + r"""Exports the lock info on a locked deployment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_export_lock_info(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportLockInfoRequest( + name="name_value", + ) + + # Make the request + response = client.export_lock_info(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ExportLockInfoRequest, dict]): + The request object. A request to get a state file lock + info passed to a 'ExportLockInfo' call. + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.LockInfo: + Details about the lock which locked + the deployment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ExportLockInfoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ExportLockInfoRequest): + request = config.ExportLockInfoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_lock_info] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ConfigClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. 
+ + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ConfigClient",) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py new file mode 100644 index 000000000000..d0ccc958ea38 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.config_v1.types import config + + +class ListDeploymentsPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListDeploymentsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., config.ListDeploymentsResponse], + request: config.ListDeploymentsRequest, + response: config.ListDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListDeploymentsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[config.Deployment]: + for page in self.pages: + yield from page.deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentsAsyncPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListDeploymentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployments`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListDeploymentsResponse]], + request: config.ListDeploymentsRequest, + response: config.ListDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListDeploymentsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = config.ListDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.Deployment]: + async def async_generator(): + async for page in self.pages: + for response in page.deployments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRevisionsPager: + """A pager for iterating through ``list_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``revisions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListRevisions`` requests and continue to iterate + through the ``revisions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., config.ListRevisionsResponse], + request: config.ListRevisionsRequest, + response: config.ListRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.config_v1.types.ListRevisionsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListRevisionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[config.Revision]: + for page in self.pages: + yield from page.revisions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListRevisionsAsyncPager: + """A pager for iterating through ``list_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``revisions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListRevisions`` requests and continue to iterate + through the ``revisions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListRevisionsResponse]], + request: config.ListRevisionsRequest, + response: config.ListRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListRevisionsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListRevisionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.Revision]: + async def async_generator(): + async for page in self.pages: + for response in page.revisions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListResourcesPager: + """A pager for iterating through ``list_resources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListResourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListResources`` requests and continue to iterate + through the ``resources`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.config_v1.types.ListResourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., config.ListResourcesResponse], + request: config.ListResourcesRequest, + response: config.ListResourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListResourcesRequest): + The initial request object. + response (google.cloud.config_v1.types.ListResourcesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListResourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListResourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[config.Resource]: + for page in self.pages: + yield from page.resources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListResourcesAsyncPager: + """A pager for iterating through ``list_resources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListResourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resources`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListResources`` requests and continue to iterate + through the ``resources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListResourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListResourcesResponse]], + request: config.ListResourcesRequest, + response: config.ListResourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListResourcesRequest): + The initial request object. + response (google.cloud.config_v1.types.ListResourcesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = config.ListResourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListResourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.Resource]: + async def async_generator(): + async for page in self.pages: + for response in page.resources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/__init__.py new file mode 100644 index 000000000000..c8ce21326275 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConfigTransport +from .grpc import ConfigGrpcTransport +from .grpc_asyncio import ConfigGrpcAsyncIOTransport +from .rest import ConfigRestInterceptor, ConfigRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ConfigTransport]] +_transport_registry["grpc"] = ConfigGrpcTransport +_transport_registry["grpc_asyncio"] = ConfigGrpcAsyncIOTransport +_transport_registry["rest"] = ConfigRestTransport + +__all__ = ( + "ConfigTransport", + "ConfigGrpcTransport", + "ConfigGrpcAsyncIOTransport", + "ConfigRestTransport", + "ConfigRestInterceptor", +) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py new file mode 100644 index 000000000000..a5139f21b4ce --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py @@ -0,0 +1,459 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.config_v1 import gapic_version as package_version +from google.cloud.config_v1.types import config + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ConfigTransport(abc.ABC): + """Abstract transport class for Config.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "config.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_deployments: gapic_v1.method.wrap_method( + self.list_deployments, + default_timeout=None, + client_info=client_info, + ), + self.get_deployment: gapic_v1.method.wrap_method( + self.get_deployment, + default_timeout=None, + client_info=client_info, + ), + self.create_deployment: gapic_v1.method.wrap_method( + self.create_deployment, + default_timeout=None, + client_info=client_info, + ), + self.update_deployment: gapic_v1.method.wrap_method( + self.update_deployment, + default_timeout=None, + client_info=client_info, + ), + self.delete_deployment: gapic_v1.method.wrap_method( + self.delete_deployment, + default_timeout=None, + client_info=client_info, + ), + self.list_revisions: gapic_v1.method.wrap_method( + self.list_revisions, + default_timeout=None, + client_info=client_info, + ), + self.get_revision: gapic_v1.method.wrap_method( + self.get_revision, + default_timeout=None, + client_info=client_info, + ), + self.get_resource: gapic_v1.method.wrap_method( + self.get_resource, + default_timeout=None, + client_info=client_info, + ), + self.list_resources: gapic_v1.method.wrap_method( + self.list_resources, + default_timeout=None, + client_info=client_info, + ), + self.export_deployment_statefile: gapic_v1.method.wrap_method( + self.export_deployment_statefile, + default_timeout=None, + client_info=client_info, + ), + self.export_revision_statefile: gapic_v1.method.wrap_method( + self.export_revision_statefile, + default_timeout=None, + client_info=client_info, + ), + self.import_statefile: gapic_v1.method.wrap_method( + self.import_statefile, + default_timeout=None, + client_info=client_info, + ), + self.delete_statefile: gapic_v1.method.wrap_method( + self.delete_statefile, 
+ default_timeout=None, + client_info=client_info, + ), + self.lock_deployment: gapic_v1.method.wrap_method( + self.lock_deployment, + default_timeout=None, + client_info=client_info, + ), + self.unlock_deployment: gapic_v1.method.wrap_method( + self.unlock_deployment, + default_timeout=None, + client_info=client_info, + ), + self.export_lock_info: gapic_v1.method.wrap_method( + self.export_lock_info, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_deployments( + self, + ) -> Callable[ + [config.ListDeploymentsRequest], + Union[ + config.ListDeploymentsResponse, Awaitable[config.ListDeploymentsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_deployment( + self, + ) -> Callable[ + [config.GetDeploymentRequest], + Union[config.Deployment, Awaitable[config.Deployment]], + ]: + raise NotImplementedError() + + @property + def create_deployment( + self, + ) -> Callable[ + [config.CreateDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_deployment( + self, + ) -> Callable[ + [config.UpdateDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_deployment( + self, + ) -> Callable[ + [config.DeleteDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_revisions( + self, + ) -> Callable[ + [config.ListRevisionsRequest], + 
Union[config.ListRevisionsResponse, Awaitable[config.ListRevisionsResponse]], + ]: + raise NotImplementedError() + + @property + def get_revision( + self, + ) -> Callable[ + [config.GetRevisionRequest], Union[config.Revision, Awaitable[config.Revision]] + ]: + raise NotImplementedError() + + @property + def get_resource( + self, + ) -> Callable[ + [config.GetResourceRequest], Union[config.Resource, Awaitable[config.Resource]] + ]: + raise NotImplementedError() + + @property + def list_resources( + self, + ) -> Callable[ + [config.ListResourcesRequest], + Union[config.ListResourcesResponse, Awaitable[config.ListResourcesResponse]], + ]: + raise NotImplementedError() + + @property + def export_deployment_statefile( + self, + ) -> Callable[ + [config.ExportDeploymentStatefileRequest], + Union[config.Statefile, Awaitable[config.Statefile]], + ]: + raise NotImplementedError() + + @property + def export_revision_statefile( + self, + ) -> Callable[ + [config.ExportRevisionStatefileRequest], + Union[config.Statefile, Awaitable[config.Statefile]], + ]: + raise NotImplementedError() + + @property + def import_statefile( + self, + ) -> Callable[ + [config.ImportStatefileRequest], + Union[config.Statefile, Awaitable[config.Statefile]], + ]: + raise NotImplementedError() + + @property + def delete_statefile( + self, + ) -> Callable[ + [config.DeleteStatefileRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def lock_deployment( + self, + ) -> Callable[ + [config.LockDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def unlock_deployment( + self, + ) -> Callable[ + [config.UnlockDeploymentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_lock_info( + self, + ) -> Callable[ + [config.ExportLockInfoRequest], + Union[config.LockInfo, 
Awaitable[config.LockInfo]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ConfigTransport",) diff 
--git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py new file mode 100644 index 000000000000..16b83b8ff3fa --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py @@ -0,0 +1,867 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.config_v1.types import config + +from .base import DEFAULT_CLIENT_INFO, ConfigTransport + + +class ConfigGrpcTransport(ConfigTransport): + """gRPC backend transport for Config. + + Infrastructure Manager is a managed service that automates + the deployment and management of Google Cloud infrastructure + resources. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "config.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "config.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_deployments( + self, + ) -> Callable[[config.ListDeploymentsRequest], config.ListDeploymentsResponse]: + r"""Return a callable for the list deployments method over gRPC. + + Lists [Deployment][google.cloud.config.v1.Deployment]s in a + given project and location. + + Returns: + Callable[[~.ListDeploymentsRequest], + ~.ListDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListDeployments", + request_serializer=config.ListDeploymentsRequest.serialize, + response_deserializer=config.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def get_deployment( + self, + ) -> Callable[[config.GetDeploymentRequest], config.Deployment]: + r"""Return a callable for the get deployment method over gRPC. + + Gets details about a + [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.GetDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetDeployment", + request_serializer=config.GetDeploymentRequest.serialize, + response_deserializer=config.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + @property + def create_deployment( + self, + ) -> Callable[[config.CreateDeploymentRequest], operations_pb2.Operation]: + r"""Return a callable for the create deployment method over gRPC. + + Creates a [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.CreateDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_deployment" not in self._stubs: + self._stubs["create_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/CreateDeployment", + request_serializer=config.CreateDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deployment"] + + @property + def update_deployment( + self, + ) -> Callable[[config.UpdateDeploymentRequest], operations_pb2.Operation]: + r"""Return a callable for the update deployment method over gRPC. + + Updates a [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.UpdateDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deployment" not in self._stubs: + self._stubs["update_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/UpdateDeployment", + request_serializer=config.UpdateDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deployment"] + + @property + def delete_deployment( + self, + ) -> Callable[[config.DeleteDeploymentRequest], operations_pb2.Operation]: + r"""Return a callable for the delete deployment method over gRPC. + + Deletes a [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.DeleteDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_deployment" not in self._stubs: + self._stubs["delete_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeleteDeployment", + request_serializer=config.DeleteDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deployment"] + + @property + def list_revisions( + self, + ) -> Callable[[config.ListRevisionsRequest], config.ListRevisionsResponse]: + r"""Return a callable for the list revisions method over gRPC. + + Lists [Revision][google.cloud.config.v1.Revision]s of a + deployment. + + Returns: + Callable[[~.ListRevisionsRequest], + ~.ListRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_revisions" not in self._stubs: + self._stubs["list_revisions"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListRevisions", + request_serializer=config.ListRevisionsRequest.serialize, + response_deserializer=config.ListRevisionsResponse.deserialize, + ) + return self._stubs["list_revisions"] + + @property + def get_revision(self) -> Callable[[config.GetRevisionRequest], config.Revision]: + r"""Return a callable for the get revision method over gRPC. + + Gets details about a + [Revision][google.cloud.config.v1.Revision]. + + Returns: + Callable[[~.GetRevisionRequest], + ~.Revision]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_revision" not in self._stubs: + self._stubs["get_revision"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetRevision", + request_serializer=config.GetRevisionRequest.serialize, + response_deserializer=config.Revision.deserialize, + ) + return self._stubs["get_revision"] + + @property + def get_resource(self) -> Callable[[config.GetResourceRequest], config.Resource]: + r"""Return a callable for the get resource method over gRPC. + + Gets details about a [Resource][google.cloud.config.v1.Resource] + deployed by Infra Manager. + + Returns: + Callable[[~.GetResourceRequest], + ~.Resource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_resource" not in self._stubs: + self._stubs["get_resource"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetResource", + request_serializer=config.GetResourceRequest.serialize, + response_deserializer=config.Resource.deserialize, + ) + return self._stubs["get_resource"] + + @property + def list_resources( + self, + ) -> Callable[[config.ListResourcesRequest], config.ListResourcesResponse]: + r"""Return a callable for the list resources method over gRPC. + + Lists [Resource][google.cloud.config.v1.Resource]s in a given + revision. + + Returns: + Callable[[~.ListResourcesRequest], + ~.ListResourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_resources" not in self._stubs: + self._stubs["list_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListResources", + request_serializer=config.ListResourcesRequest.serialize, + response_deserializer=config.ListResourcesResponse.deserialize, + ) + return self._stubs["list_resources"] + + @property + def export_deployment_statefile( + self, + ) -> Callable[[config.ExportDeploymentStatefileRequest], config.Statefile]: + r"""Return a callable for the export deployment statefile method over gRPC. + + Exports Terraform state file from a given deployment. + + Returns: + Callable[[~.ExportDeploymentStatefileRequest], + ~.Statefile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_deployment_statefile" not in self._stubs: + self._stubs["export_deployment_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportDeploymentStatefile", + request_serializer=config.ExportDeploymentStatefileRequest.serialize, + response_deserializer=config.Statefile.deserialize, + ) + return self._stubs["export_deployment_statefile"] + + @property + def export_revision_statefile( + self, + ) -> Callable[[config.ExportRevisionStatefileRequest], config.Statefile]: + r"""Return a callable for the export revision statefile method over gRPC. + + Exports Terraform state file from a given revision. + + Returns: + Callable[[~.ExportRevisionStatefileRequest], + ~.Statefile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_revision_statefile" not in self._stubs: + self._stubs["export_revision_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportRevisionStatefile", + request_serializer=config.ExportRevisionStatefileRequest.serialize, + response_deserializer=config.Statefile.deserialize, + ) + return self._stubs["export_revision_statefile"] + + @property + def import_statefile( + self, + ) -> Callable[[config.ImportStatefileRequest], config.Statefile]: + r"""Return a callable for the import statefile method over gRPC. + + Imports Terraform state file in a given deployment. + The state file does not take effect until the Deployment + has been unlocked. + + Returns: + Callable[[~.ImportStatefileRequest], + ~.Statefile]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_statefile" not in self._stubs: + self._stubs["import_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ImportStatefile", + request_serializer=config.ImportStatefileRequest.serialize, + response_deserializer=config.Statefile.deserialize, + ) + return self._stubs["import_statefile"] + + @property + def delete_statefile( + self, + ) -> Callable[[config.DeleteStatefileRequest], empty_pb2.Empty]: + r"""Return a callable for the delete statefile method over gRPC. + + Deletes Terraform state file in a given deployment. + + Returns: + Callable[[~.DeleteStatefileRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_statefile" not in self._stubs: + self._stubs["delete_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeleteStatefile", + request_serializer=config.DeleteStatefileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_statefile"] + + @property + def lock_deployment( + self, + ) -> Callable[[config.LockDeploymentRequest], operations_pb2.Operation]: + r"""Return a callable for the lock deployment method over gRPC. + + Locks a deployment. + + Returns: + Callable[[~.LockDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lock_deployment" not in self._stubs: + self._stubs["lock_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/LockDeployment", + request_serializer=config.LockDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["lock_deployment"] + + @property + def unlock_deployment( + self, + ) -> Callable[[config.UnlockDeploymentRequest], operations_pb2.Operation]: + r"""Return a callable for the unlock deployment method over gRPC. + + Unlocks a locked deployment. + + Returns: + Callable[[~.UnlockDeploymentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "unlock_deployment" not in self._stubs: + self._stubs["unlock_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/UnlockDeployment", + request_serializer=config.UnlockDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["unlock_deployment"] + + @property + def export_lock_info( + self, + ) -> Callable[[config.ExportLockInfoRequest], config.LockInfo]: + r"""Return a callable for the export lock info method over gRPC. + + Exports the lock info on a locked deployment. + + Returns: + Callable[[~.ExportLockInfoRequest], + ~.LockInfo]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_lock_info" not in self._stubs: + self._stubs["export_lock_info"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportLockInfo", + request_serializer=config.ExportLockInfoRequest.serialize, + response_deserializer=config.LockInfo.deserialize, + ) + return self._stubs["export_lock_info"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ConfigGrpcTransport",) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py new file mode 100644 index 000000000000..84eaa9ba992c --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py @@ -0,0 +1,888 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.config_v1.types import config + +from .base import DEFAULT_CLIENT_INFO, ConfigTransport +from .grpc import ConfigGrpcTransport + + +class ConfigGrpcAsyncIOTransport(ConfigTransport): + """gRPC AsyncIO backend transport for Config. + + Infrastructure Manager is a managed service that automates + the deployment and management of Google Cloud infrastructure + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "config.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "config.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_deployments( + self, + ) -> Callable[ + [config.ListDeploymentsRequest], Awaitable[config.ListDeploymentsResponse] + ]: + r"""Return a callable for the list deployments method over gRPC. + + Lists [Deployment][google.cloud.config.v1.Deployment]s in a + given project and location. + + Returns: + Callable[[~.ListDeploymentsRequest], + Awaitable[~.ListDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListDeployments", + request_serializer=config.ListDeploymentsRequest.serialize, + response_deserializer=config.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def get_deployment( + self, + ) -> Callable[[config.GetDeploymentRequest], Awaitable[config.Deployment]]: + r"""Return a callable for the get deployment method over gRPC. + + Gets details about a + [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.GetDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetDeployment", + request_serializer=config.GetDeploymentRequest.serialize, + response_deserializer=config.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + @property + def create_deployment( + self, + ) -> Callable[ + [config.CreateDeploymentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create deployment method over gRPC. + + Creates a [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.CreateDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment" not in self._stubs: + self._stubs["create_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/CreateDeployment", + request_serializer=config.CreateDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deployment"] + + @property + def update_deployment( + self, + ) -> Callable[ + [config.UpdateDeploymentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update deployment method over gRPC. + + Updates a [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.UpdateDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_deployment" not in self._stubs: + self._stubs["update_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/UpdateDeployment", + request_serializer=config.UpdateDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deployment"] + + @property + def delete_deployment( + self, + ) -> Callable[ + [config.DeleteDeploymentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete deployment method over gRPC. + + Deletes a [Deployment][google.cloud.config.v1.Deployment]. + + Returns: + Callable[[~.DeleteDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment" not in self._stubs: + self._stubs["delete_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeleteDeployment", + request_serializer=config.DeleteDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deployment"] + + @property + def list_revisions( + self, + ) -> Callable[ + [config.ListRevisionsRequest], Awaitable[config.ListRevisionsResponse] + ]: + r"""Return a callable for the list revisions method over gRPC. + + Lists [Revision][google.cloud.config.v1.Revision]s of a + deployment. + + Returns: + Callable[[~.ListRevisionsRequest], + Awaitable[~.ListRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_revisions" not in self._stubs: + self._stubs["list_revisions"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListRevisions", + request_serializer=config.ListRevisionsRequest.serialize, + response_deserializer=config.ListRevisionsResponse.deserialize, + ) + return self._stubs["list_revisions"] + + @property + def get_revision( + self, + ) -> Callable[[config.GetRevisionRequest], Awaitable[config.Revision]]: + r"""Return a callable for the get revision method over gRPC. + + Gets details about a + [Revision][google.cloud.config.v1.Revision]. + + Returns: + Callable[[~.GetRevisionRequest], + Awaitable[~.Revision]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_revision" not in self._stubs: + self._stubs["get_revision"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetRevision", + request_serializer=config.GetRevisionRequest.serialize, + response_deserializer=config.Revision.deserialize, + ) + return self._stubs["get_revision"] + + @property + def get_resource( + self, + ) -> Callable[[config.GetResourceRequest], Awaitable[config.Resource]]: + r"""Return a callable for the get resource method over gRPC. + + Gets details about a [Resource][google.cloud.config.v1.Resource] + deployed by Infra Manager. + + Returns: + Callable[[~.GetResourceRequest], + Awaitable[~.Resource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_resource" not in self._stubs: + self._stubs["get_resource"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetResource", + request_serializer=config.GetResourceRequest.serialize, + response_deserializer=config.Resource.deserialize, + ) + return self._stubs["get_resource"] + + @property + def list_resources( + self, + ) -> Callable[ + [config.ListResourcesRequest], Awaitable[config.ListResourcesResponse] + ]: + r"""Return a callable for the list resources method over gRPC. + + Lists [Resource][google.cloud.config.v1.Resource]s in a given + revision. + + Returns: + Callable[[~.ListResourcesRequest], + Awaitable[~.ListResourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_resources" not in self._stubs: + self._stubs["list_resources"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListResources", + request_serializer=config.ListResourcesRequest.serialize, + response_deserializer=config.ListResourcesResponse.deserialize, + ) + return self._stubs["list_resources"] + + @property + def export_deployment_statefile( + self, + ) -> Callable[ + [config.ExportDeploymentStatefileRequest], Awaitable[config.Statefile] + ]: + r"""Return a callable for the export deployment statefile method over gRPC. + + Exports Terraform state file from a given deployment. + + Returns: + Callable[[~.ExportDeploymentStatefileRequest], + Awaitable[~.Statefile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_deployment_statefile" not in self._stubs: + self._stubs["export_deployment_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportDeploymentStatefile", + request_serializer=config.ExportDeploymentStatefileRequest.serialize, + response_deserializer=config.Statefile.deserialize, + ) + return self._stubs["export_deployment_statefile"] + + @property + def export_revision_statefile( + self, + ) -> Callable[[config.ExportRevisionStatefileRequest], Awaitable[config.Statefile]]: + r"""Return a callable for the export revision statefile method over gRPC. + + Exports Terraform state file from a given revision. + + Returns: + Callable[[~.ExportRevisionStatefileRequest], + Awaitable[~.Statefile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_revision_statefile" not in self._stubs: + self._stubs["export_revision_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportRevisionStatefile", + request_serializer=config.ExportRevisionStatefileRequest.serialize, + response_deserializer=config.Statefile.deserialize, + ) + return self._stubs["export_revision_statefile"] + + @property + def import_statefile( + self, + ) -> Callable[[config.ImportStatefileRequest], Awaitable[config.Statefile]]: + r"""Return a callable for the import statefile method over gRPC. + + Imports Terraform state file in a given deployment. + The state file does not take effect until the Deployment + has been unlocked. + + Returns: + Callable[[~.ImportStatefileRequest], + Awaitable[~.Statefile]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_statefile" not in self._stubs: + self._stubs["import_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ImportStatefile", + request_serializer=config.ImportStatefileRequest.serialize, + response_deserializer=config.Statefile.deserialize, + ) + return self._stubs["import_statefile"] + + @property + def delete_statefile( + self, + ) -> Callable[[config.DeleteStatefileRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete statefile method over gRPC. + + Deletes Terraform state file in a given deployment. + + Returns: + Callable[[~.DeleteStatefileRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_statefile" not in self._stubs: + self._stubs["delete_statefile"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeleteStatefile", + request_serializer=config.DeleteStatefileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_statefile"] + + @property + def lock_deployment( + self, + ) -> Callable[[config.LockDeploymentRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the lock deployment method over gRPC. + + Locks a deployment. + + Returns: + Callable[[~.LockDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "lock_deployment" not in self._stubs: + self._stubs["lock_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/LockDeployment", + request_serializer=config.LockDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["lock_deployment"] + + @property + def unlock_deployment( + self, + ) -> Callable[ + [config.UnlockDeploymentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the unlock deployment method over gRPC. + + Unlocks a locked deployment. + + Returns: + Callable[[~.UnlockDeploymentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "unlock_deployment" not in self._stubs: + self._stubs["unlock_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/UnlockDeployment", + request_serializer=config.UnlockDeploymentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["unlock_deployment"] + + @property + def export_lock_info( + self, + ) -> Callable[[config.ExportLockInfoRequest], Awaitable[config.LockInfo]]: + r"""Return a callable for the export lock info method over gRPC. + + Exports the lock info on a locked deployment. + + Returns: + Callable[[~.ExportLockInfoRequest], + Awaitable[~.LockInfo]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_lock_info" not in self._stubs: + self._stubs["export_lock_info"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportLockInfo", + request_serializer=config.ExportLockInfoRequest.serialize, + response_deserializer=config.LockInfo.deserialize, + ) + return self._stubs["export_lock_info"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+ Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. 
+ Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("ConfigGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py new file mode 100644 index 000000000000..a3e4de33b0ca --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py @@ -0,0 +1,3132 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.config_v1.types import config + +from .base import ConfigTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ConfigRestInterceptor: + """Interceptor for Config. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ConfigRestTransport. + + .. 
code-block:: python + class MyCustomConfigInterceptor(ConfigRestInterceptor): + def pre_create_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_statefile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_export_deployment_statefile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_deployment_statefile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_lock_info(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_lock_info(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_revision_statefile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_revision_statefile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_resource(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_resource(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_revision(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_get_revision(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_statefile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_statefile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_resources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_resources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_revisions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_revisions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lock_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lock_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_unlock_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_unlock_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ConfigRestTransport(interceptor=MyCustomConfigInterceptor()) + client = ConfigClient(transport=transport) + + + """ + + def 
pre_create_deployment( + self, + request: config.CreateDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.CreateDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_create_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_deployment + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_delete_deployment( + self, + request: config.DeleteDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.DeleteDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_delete_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_deployment + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_delete_statefile( + self, + request: config.DeleteStatefileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.DeleteStatefileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_statefile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def pre_export_deployment_statefile( + self, + request: config.ExportDeploymentStatefileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.ExportDeploymentStatefileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_deployment_statefile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_export_deployment_statefile( + self, response: config.Statefile + ) -> config.Statefile: + """Post-rpc interceptor for export_deployment_statefile + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_export_lock_info( + self, request: config.ExportLockInfoRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.ExportLockInfoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_lock_info + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_export_lock_info(self, response: config.LockInfo) -> config.LockInfo: + """Post-rpc interceptor for export_lock_info + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_export_revision_statefile( + self, + request: config.ExportRevisionStatefileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.ExportRevisionStatefileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_revision_statefile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_export_revision_statefile( + self, response: config.Statefile + ) -> config.Statefile: + """Post-rpc interceptor for export_revision_statefile + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_get_deployment( + self, request: config.GetDeploymentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.GetDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_deployment(self, response: config.Deployment) -> config.Deployment: + """Post-rpc interceptor for get_deployment + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_get_resource( + self, request: config.GetResourceRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.GetResourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_resource + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_resource(self, response: config.Resource) -> config.Resource: + """Post-rpc interceptor for get_resource + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_get_revision( + self, request: config.GetRevisionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.GetRevisionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_revision + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_get_revision(self, response: config.Revision) -> config.Revision: + """Post-rpc interceptor for get_revision + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_import_statefile( + self, + request: config.ImportStatefileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.ImportStatefileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_statefile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_import_statefile(self, response: config.Statefile) -> config.Statefile: + """Post-rpc interceptor for import_statefile + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_list_deployments( + self, + request: config.ListDeploymentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.ListDeploymentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_list_deployments( + self, response: config.ListDeploymentsResponse + ) -> config.ListDeploymentsResponse: + """Post-rpc interceptor for list_deployments + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. 
+ """ + return response + + def pre_list_resources( + self, request: config.ListResourcesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.ListResourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_list_resources( + self, response: config.ListResourcesResponse + ) -> config.ListResourcesResponse: + """Post-rpc interceptor for list_resources + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_list_revisions( + self, request: config.ListRevisionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.ListRevisionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_revisions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_list_revisions( + self, response: config.ListRevisionsResponse + ) -> config.ListRevisionsResponse: + """Post-rpc interceptor for list_revisions + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_lock_deployment( + self, request: config.LockDeploymentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.LockDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lock_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_lock_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for lock_deployment + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_unlock_deployment( + self, + request: config.UnlockDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.UnlockDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for unlock_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_unlock_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for unlock_deployment + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_update_deployment( + self, + request: config.UpdateDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.UpdateDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_update_deployment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_deployment + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ConfigRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ConfigRestInterceptor + + +class ConfigRestTransport(ConfigTransport): + """REST backend transport for Config. + + Infrastructure Manager is a managed service that automates + the deployment and management of Google Cloud infrastructure + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "config.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ConfigRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ConfigRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateDeployment(ConfigRestStub): + def __hash__(self): + return hash("CreateDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deploymentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.CreateDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create deployment method over HTTP. + + Args: + request (~.config.CreateDeploymentRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deployments", + "body": "deployment", + }, + ] + request, metadata = self._interceptor.pre_create_deployment( + request, metadata + ) + pb_request = config.CreateDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deployment(resp) + return resp + + class _DeleteDeployment(ConfigRestStub): + def __hash__(self): + return hash("DeleteDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.DeleteDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete deployment method over HTTP. + + Args: + request (~.config.DeleteDeploymentRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_deployment( + request, metadata + ) + pb_request = config.DeleteDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_deployment(resp) + return resp + + class _DeleteStatefile(ConfigRestStub): + def __hash__(self): + return hash("DeleteStatefile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.DeleteStatefileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete statefile method over HTTP. + + Args: + request (~.config.DeleteStatefileRequest): + The request object. A request to delete a state file + passed to a 'DeleteStatefile' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}:deleteState", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_delete_statefile( + request, metadata + ) + pb_request = config.DeleteStatefileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExportDeploymentStatefile(ConfigRestStub): + def __hash__(self): + return hash("ExportDeploymentStatefile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ExportDeploymentStatefileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Call the export deployment + statefile method over HTTP. + + Args: + request (~.config.ExportDeploymentStatefileRequest): + The request object. A request to export a state file + passed to a 'ExportDeploymentStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.config.Statefile: + Contains info about a Terraform state + file + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/deployments/*}:exportState", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_deployment_statefile( + request, metadata + ) + pb_request = config.ExportDeploymentStatefileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Statefile() + pb_resp = config.Statefile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_deployment_statefile(resp) + return resp + + class _ExportLockInfo(ConfigRestStub): + def __hash__(self): + return hash("ExportLockInfo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ExportLockInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.LockInfo: + r"""Call the export lock info method over HTTP. + + Args: + request (~.config.ExportLockInfoRequest): + The request object. A request to get a state file lock + info passed to a 'ExportLockInfo' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.LockInfo: + Details about the lock which locked + the deployment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}:exportLock", + }, + ] + request, metadata = self._interceptor.pre_export_lock_info( + request, metadata + ) + pb_request = config.ExportLockInfoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.LockInfo() + pb_resp = config.LockInfo.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_lock_info(resp) + return resp + + class _ExportRevisionStatefile(ConfigRestStub): + def __hash__(self): + return hash("ExportRevisionStatefile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ExportRevisionStatefileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Call the export revision statefile method over HTTP. + + Args: + request (~.config.ExportRevisionStatefileRequest): + The request object. A request to export a state file + passed to a 'ExportRevisionStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.config.Statefile: + Contains info about a Terraform state + file + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}:exportState", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_revision_statefile( + request, metadata + ) + pb_request = config.ExportRevisionStatefileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Statefile() + pb_resp = config.Statefile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_revision_statefile(resp) + return resp + + class _GetDeployment(ConfigRestStub): + def __hash__(self): + return hash("GetDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.GetDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Deployment: + r"""Call the get deployment method over HTTP. + + Args: + request (~.config.GetDeploymentRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.Deployment: + A Deployment is a group of resources + and configs managed and provisioned by + Infra Manager. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deployment(request, metadata) + pb_request = config.GetDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Deployment() + pb_resp = config.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deployment(resp) + return resp + + class _GetResource(ConfigRestStub): + def __hash__(self): + return hash("GetResource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.GetResourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Resource: + r"""Call the get resource method over HTTP. + + Args: + request (~.config.GetResourceRequest): + The request object. A request to get a Resource from a + 'GetResource' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.Resource: + Resource represents a Google Cloud + Platform resource actuated by IM. + Resources are child resources of + Revisions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_resource(request, metadata) + pb_request = config.GetResourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Resource() + pb_resp = config.Resource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_resource(resp) + return resp + + class _GetRevision(ConfigRestStub): + def __hash__(self): + return hash("GetRevision") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.GetRevisionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Revision: + r"""Call the get revision method over HTTP. + + Args: + request (~.config.GetRevisionRequest): + The request object. A request to get a Revision from a + 'GetRevision' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.Revision: + A child resource of a Deployment + generated by a 'CreateDeployment' or + 'UpdateDeployment' call. Each Revision + contains metadata pertaining to a + snapshot of a particular Deployment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deployments/*/revisions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_revision(request, metadata) + pb_request = config.GetRevisionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Revision() + pb_resp = config.Revision.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_revision(resp) + return resp + + class _ImportStatefile(ConfigRestStub): + def __hash__(self): + return hash("ImportStatefile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ImportStatefileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Statefile: + r"""Call the import statefile method over HTTP. + + Args: + request (~.config.ImportStatefileRequest): + The request object. A request to import a state file + passed to a 'ImportStatefile' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.config.Statefile: + Contains info about a Terraform state + file + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/deployments/*}:importState", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_statefile( + request, metadata + ) + pb_request = config.ImportStatefileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Statefile() + pb_resp = config.Statefile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_statefile(resp) + return resp + + class _ListDeployments(ConfigRestStub): + def __hash__(self): + return hash("ListDeployments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ListDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ListDeploymentsResponse: + r"""Call the list deployments method over HTTP. + + Args: + request (~.config.ListDeploymentsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.config.ListDeploymentsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deployments", + }, + ] + request, metadata = self._interceptor.pre_list_deployments( + request, metadata + ) + pb_request = config.ListDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ListDeploymentsResponse() + pb_resp = config.ListDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deployments(resp) + return resp + + class _ListResources(ConfigRestStub): + def __hash__(self): + return hash("ListResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ListResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ListResourcesResponse: + r"""Call the list resources method over HTTP. + + Args: + request (~.config.ListResourcesRequest): + The request object. A request to list Resources passed to + a 'ListResources' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.ListResourcesResponse: + A response to a 'ListResources' call. + Contains a list of Resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources", + }, + ] + request, metadata = self._interceptor.pre_list_resources(request, metadata) + pb_request = config.ListResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ListResourcesResponse() + pb_resp = config.ListResourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_resources(resp) + return resp + + class _ListRevisions(ConfigRestStub): + def __hash__(self): + return hash("ListRevisions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ListRevisionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ListRevisionsResponse: + r"""Call the list revisions method over HTTP. + + Args: + request (~.config.ListRevisionsRequest): + The request object. A request to list Revisions passed to + a 'ListRevisions' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.ListRevisionsResponse: + A response to a 'ListRevisions' call. + Contains a list of Revisions. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/deployments/*}/revisions", + }, + ] + request, metadata = self._interceptor.pre_list_revisions(request, metadata) + pb_request = config.ListRevisionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ListRevisionsResponse() + pb_resp = config.ListRevisionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_revisions(resp) + return resp + + class _LockDeployment(ConfigRestStub): + def __hash__(self): + return hash("LockDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.LockDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the lock deployment method over HTTP. + + Args: + request (~.config.LockDeploymentRequest): + The request object. A request to lock a deployment passed + to a 'LockDeployment' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}:lock", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_lock_deployment(request, metadata) + pb_request = config.LockDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lock_deployment(resp) + return resp + + class _UnlockDeployment(ConfigRestStub): + def __hash__(self): + return hash("UnlockDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.UnlockDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the unlock deployment method over HTTP. + + Args: + request (~.config.UnlockDeploymentRequest): + The request object. A request to unlock a state file + passed to a 'UnlockDeployment' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deployments/*}:unlock", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_unlock_deployment( + request, metadata + ) + pb_request = config.UnlockDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_unlock_deployment(resp) + return resp + + class _UpdateDeployment(ConfigRestStub): + def __hash__(self): + return hash("UpdateDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.UpdateDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update deployment method over HTTP. + + Args: + request (~.config.UpdateDeploymentRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deployment.name=projects/*/locations/*/deployments/*}", + "body": "deployment", + }, + ] + request, metadata = self._interceptor.pre_update_deployment( + request, metadata + ) + pb_request = config.UpdateDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deployment(resp) + return resp + + @property + def create_deployment( + self, + ) -> Callable[[config.CreateDeploymentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deployment( + self, + ) -> Callable[[config.DeleteDeploymentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_statefile( + self, + ) -> Callable[[config.DeleteStatefileRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStatefile(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_deployment_statefile( + self, + ) -> Callable[[config.ExportDeploymentStatefileRequest], config.Statefile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportDeploymentStatefile(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_lock_info( + self, + ) -> Callable[[config.ExportLockInfoRequest], config.LockInfo]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportLockInfo(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_revision_statefile( + self, + ) -> Callable[[config.ExportRevisionStatefileRequest], config.Statefile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportRevisionStatefile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deployment( + self, + ) -> Callable[[config.GetDeploymentRequest], config.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_resource(self) -> Callable[[config.GetResourceRequest], config.Resource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetResource(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_revision(self) -> Callable[[config.GetRevisionRequest], config.Revision]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRevision(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_statefile( + self, + ) -> Callable[[config.ImportStatefileRequest], config.Statefile]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportStatefile(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deployments( + self, + ) -> Callable[[config.ListDeploymentsRequest], config.ListDeploymentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_resources( + self, + ) -> Callable[[config.ListResourcesRequest], config.ListResourcesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_revisions( + self, + ) -> Callable[[config.ListRevisionsRequest], config.ListRevisionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRevisions(self._session, self._host, self._interceptor) # type: ignore + + @property + def lock_deployment( + self, + ) -> Callable[[config.LockDeploymentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LockDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def unlock_deployment( + self, + ) -> Callable[[config.UnlockDeploymentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UnlockDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deployment( + self, + ) -> Callable[[config.UpdateDeploymentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ConfigRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ConfigRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(ConfigRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/deployments/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(ConfigRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/deployments/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(ConfigRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. 
+ + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/deployments/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ConfigRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ConfigRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ConfigRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ConfigRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ConfigRestTransport",) diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py new file mode 100644 index 000000000000..5ce94d9c52f7 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .config import ( + ApplyResults, + CreateDeploymentRequest, + DeleteDeploymentRequest, + DeleteStatefileRequest, + Deployment, + DeploymentOperationMetadata, + ExportDeploymentStatefileRequest, + ExportLockInfoRequest, + ExportRevisionStatefileRequest, + GetDeploymentRequest, + GetResourceRequest, + GetRevisionRequest, + GitSource, + ImportStatefileRequest, + ListDeploymentsRequest, + ListDeploymentsResponse, + ListResourcesRequest, + ListResourcesResponse, + ListRevisionsRequest, + ListRevisionsResponse, + LockDeploymentRequest, + LockInfo, + OperationMetadata, + Resource, + ResourceCAIInfo, + ResourceTerraformInfo, + Revision, + Statefile, + TerraformBlueprint, + TerraformError, + TerraformOutput, + TerraformVariable, + UnlockDeploymentRequest, + UpdateDeploymentRequest, +) + +__all__ = ( + "ApplyResults", + "CreateDeploymentRequest", + "DeleteDeploymentRequest", + "DeleteStatefileRequest", + "Deployment", + "DeploymentOperationMetadata", + "ExportDeploymentStatefileRequest", + "ExportLockInfoRequest", + "ExportRevisionStatefileRequest", + "GetDeploymentRequest", + "GetResourceRequest", + "GetRevisionRequest", + "GitSource", + "ImportStatefileRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListResourcesRequest", + "ListResourcesResponse", + "ListRevisionsRequest", + "ListRevisionsResponse", + "LockDeploymentRequest", + "LockInfo", + "OperationMetadata", + "Resource", + "ResourceCAIInfo", + "ResourceTerraformInfo", + "Revision", + "Statefile", + "TerraformBlueprint", + "TerraformError", + "TerraformOutput", + "TerraformVariable", + "UnlockDeploymentRequest", + "UpdateDeploymentRequest", +) diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/config.py b/packages/google-cloud-config/google/cloud/config_v1/types/config.py new file mode 100644 index 000000000000..91145d025753 --- /dev/null +++ b/packages/google-cloud-config/google/cloud/config_v1/types/config.py @@ -0,0 +1,1802 @@ +# -*- coding: utf-8 -*- +# Copyright 
2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.config.v1", + manifest={ + "Deployment", + "TerraformBlueprint", + "TerraformVariable", + "ApplyResults", + "TerraformOutput", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "GetDeploymentRequest", + "ListRevisionsRequest", + "ListRevisionsResponse", + "GetRevisionRequest", + "CreateDeploymentRequest", + "UpdateDeploymentRequest", + "DeleteDeploymentRequest", + "OperationMetadata", + "Revision", + "TerraformError", + "GitSource", + "DeploymentOperationMetadata", + "Resource", + "ResourceTerraformInfo", + "ResourceCAIInfo", + "GetResourceRequest", + "ListResourcesRequest", + "ListResourcesResponse", + "Statefile", + "ExportDeploymentStatefileRequest", + "ExportRevisionStatefileRequest", + "ImportStatefileRequest", + "DeleteStatefileRequest", + "LockDeploymentRequest", + "UnlockDeploymentRequest", + "ExportLockInfoRequest", + "LockInfo", + }, +) + + +class Deployment(proto.Message): + r"""A Deployment is a group of resources and configs managed and + provisioned by Infra Manager. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + terraform_blueprint (google.cloud.config_v1.types.TerraformBlueprint): + A blueprint described using Terraform's + HashiCorp Configuration Language as a root + module. + + This field is a member of `oneof`_ ``blueprint``. + name (str): + Resource name of the deployment. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the deployment was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the deployment was + last modified. + labels (MutableMapping[str, str]): + User-defined metadata for the deployment. + state (google.cloud.config_v1.types.Deployment.State): + Output only. Current state of the deployment. + latest_revision (str): + Output only. Revision name that was most recently applied. + Format: + ``projects/{project}/locations/{location}/deployments/{deployment}/ revisions/{revision}`` + state_detail (str): + Output only. Additional information regarding + the current state. + error_code (google.cloud.config_v1.types.Deployment.ErrorCode): + Output only. Error code describing errors + that may have occurred. + delete_results (google.cloud.config_v1.types.ApplyResults): + Output only. Location of artifacts from a + DeleteDeployment operation. + delete_build (str): + Output only. Cloud Build instance UUID + associated with deleting this deployment. + delete_logs (str): + Output only. Location of Cloud Build logs in Google Cloud + Storage, populated when deleting this deployment. Format: + ``gs://{bucket}/{object}``. + tf_errors (MutableSequence[google.cloud.config_v1.types.TerraformError]): + Output only. Errors encountered when deleting this + deployment. Errors are truncated to 10 entries, see + ``delete_results`` and ``error_logs`` for full details. + error_logs (str): + Output only. 
Location of Terraform error logs in Google + Cloud Storage. Format: ``gs://{bucket}/{object}``. + artifacts_gcs_bucket (str): + Optional. User-defined location of Cloud Build logs and + artifacts in Google Cloud Storage. Format: + ``gs://{bucket}/{folder}`` + + A default bucket will be bootstrapped if the field is not + set or empty. Default bucket format: + ``gs://--blueprint-config`` + Constraints: + + - The bucket needs to be in the same project as the + deployment + - The path cannot be within the path of ``gcs_source`` + - The field cannot be updated, including changing its + presence + + This field is a member of `oneof`_ ``_artifacts_gcs_bucket``. + service_account (str): + Optional. User-specified Service Account (SA) credentials to + be used when actuating resources. Format: + ``projects/{projectID}/serviceAccounts/{serviceAccount}`` + + This field is a member of `oneof`_ ``_service_account``. + import_existing_resources (bool): + By default, Infra Manager will return a + failure when Terraform encounters a 409 code + (resource conflict error) during actuation. If + this flag is set to true, Infra Manager will + instead attempt to automatically import the + resource into the Terraform state (for supported + resource types) and continue actuation. + + Not all resource types are supported, refer to + documentation. + + This field is a member of `oneof`_ ``_import_existing_resources``. + worker_pool (str): + Optional. The user-specified Cloud Build worker pool + resource in which the Cloud Build job will execute. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPoolId}``. + If this field is unspecified, the default Cloud Build worker + pool will be used. + + This field is a member of `oneof`_ ``_worker_pool``. + lock_state (google.cloud.config_v1.types.Deployment.LockState): + Output only. Current lock state of the + deployment. + """ + + class State(proto.Enum): + r"""Possible states of a deployment. 
+ + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + CREATING (1): + The deployment is being created. + ACTIVE (2): + The deployment is healthy. + UPDATING (3): + The deployment is being updated. + DELETING (4): + The deployment is being deleted. + FAILED (5): + The deployment has encountered an unexpected + error. + SUSPENDED (6): + The deployment is no longer being actively + reconciled. This may be the result of recovering + the project after deletion. + DELETED (7): + The deployment has been deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + UPDATING = 3 + DELETING = 4 + FAILED = 5 + SUSPENDED = 6 + DELETED = 7 + + class ErrorCode(proto.Enum): + r"""Possible errors that can occur with deployments. + + Values: + ERROR_CODE_UNSPECIFIED (0): + No error code was specified. + REVISION_FAILED (1): + The revision failed. See Revision for more + details. + CLOUD_BUILD_PERMISSION_DENIED (3): + Cloud Build failed due to a permission issue. + DELETE_BUILD_API_FAILED (5): + Cloud Build job associated with a deployment + deletion could not be started. + DELETE_BUILD_RUN_FAILED (6): + Cloud Build job associated with a deployment + deletion was started but failed. + BUCKET_CREATION_PERMISSION_DENIED (7): + Cloud Storage bucket creation failed due to a + permission issue. + BUCKET_CREATION_FAILED (8): + Cloud Storage bucket creation failed due to + an issue unrelated to permissions. + """ + ERROR_CODE_UNSPECIFIED = 0 + REVISION_FAILED = 1 + CLOUD_BUILD_PERMISSION_DENIED = 3 + DELETE_BUILD_API_FAILED = 5 + DELETE_BUILD_RUN_FAILED = 6 + BUCKET_CREATION_PERMISSION_DENIED = 7 + BUCKET_CREATION_FAILED = 8 + + class LockState(proto.Enum): + r"""Possible lock states of a deployment. + + Values: + LOCK_STATE_UNSPECIFIED (0): + The default value. This value is used if the + lock state is omitted. + LOCKED (1): + The deployment is locked. + UNLOCKED (2): + The deployment is unlocked. 
+ LOCKING (3): + The deployment is being locked. + UNLOCKING (4): + The deployment is being unlocked. + LOCK_FAILED (5): + The deployment has failed to lock. + UNLOCK_FAILED (6): + The deployment has failed to unlock. + """ + LOCK_STATE_UNSPECIFIED = 0 + LOCKED = 1 + UNLOCKED = 2 + LOCKING = 3 + UNLOCKING = 4 + LOCK_FAILED = 5 + UNLOCK_FAILED = 6 + + terraform_blueprint: "TerraformBlueprint" = proto.Field( + proto.MESSAGE, + number=6, + oneof="blueprint", + message="TerraformBlueprint", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + latest_revision: str = proto.Field( + proto.STRING, + number=7, + ) + state_detail: str = proto.Field( + proto.STRING, + number=9, + ) + error_code: ErrorCode = proto.Field( + proto.ENUM, + number=10, + enum=ErrorCode, + ) + delete_results: "ApplyResults" = proto.Field( + proto.MESSAGE, + number=8, + message="ApplyResults", + ) + delete_build: str = proto.Field( + proto.STRING, + number=11, + ) + delete_logs: str = proto.Field( + proto.STRING, + number=12, + ) + tf_errors: MutableSequence["TerraformError"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="TerraformError", + ) + error_logs: str = proto.Field( + proto.STRING, + number=14, + ) + artifacts_gcs_bucket: str = proto.Field( + proto.STRING, + number=15, + optional=True, + ) + service_account: str = proto.Field( + proto.STRING, + number=16, + optional=True, + ) + import_existing_resources: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=19, + optional=True, + 
) + lock_state: LockState = proto.Field( + proto.ENUM, + number=20, + enum=LockState, + ) + + +class TerraformBlueprint(proto.Message): + r"""TerraformBlueprint describes the source of a Terraform root + module which describes the resources and configs to be deployed. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_source (str): + Required. URI of an object in Google Cloud Storage. Format: + ``gs://{bucket}/{object}`` + + URI may also specify an object version for zipped objects. + Format: ``gs://{bucket}/{object}#{version}`` + + This field is a member of `oneof`_ ``source``. + git_source (google.cloud.config_v1.types.GitSource): + Required. URI of a public Git repo. + + This field is a member of `oneof`_ ``source``. + input_values (MutableMapping[str, google.cloud.config_v1.types.TerraformVariable]): + Input variable values for the Terraform + blueprint. + """ + + gcs_source: str = proto.Field( + proto.STRING, + number=1, + oneof="source", + ) + git_source: "GitSource" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="GitSource", + ) + input_values: MutableMapping[str, "TerraformVariable"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message="TerraformVariable", + ) + + +class TerraformVariable(proto.Message): + r"""A Terraform input variable. + + Attributes: + input_value (google.protobuf.struct_pb2.Value): + Input variable value. + """ + + input_value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Value, + ) + + +class ApplyResults(proto.Message): + r"""Outputs and artifacts from applying a deployment. 
+ + Attributes: + content (str): + Location of a blueprint copy and other manifests in Google + Cloud Storage. Format: ``gs://{bucket}/{object}`` + artifacts (str): + Location of artifacts (e.g. logs) in Google Cloud Storage. + Format: ``gs://{bucket}/{object}`` + outputs (MutableMapping[str, google.cloud.config_v1.types.TerraformOutput]): + Map of output name to output info. + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + artifacts: str = proto.Field( + proto.STRING, + number=2, + ) + outputs: MutableMapping[str, "TerraformOutput"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="TerraformOutput", + ) + + +class TerraformOutput(proto.Message): + r"""Describes a Terraform output. + + Attributes: + sensitive (bool): + Identifies whether Terraform has set this + output as a potential sensitive value. + value (google.protobuf.struct_pb2.Value): + Value of output. + """ + + sensitive: bool = proto.Field( + proto.BOOL, + number=1, + ) + value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + +class ListDeploymentsRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. The parent in whose context the Deployments are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + page_size (int): + When requesting a page of resources, 'page_size' specifies + number of resources to return. If unspecified or set to 0, + all resources will be returned. + page_token (str): + Token returned by previous call to + 'ListDeployments' which specifies the position + in the list from where to continue listing the + resources. + filter (str): + Lists the Deployments that match the filter expression. A + filter expression filters the resources listed in the + response. 
The expression must be of the form '{field} + {operator} {value}' where operators: '<', '>', '<=', '>=', + '!=', '=', ':' are supported (colon ':' represents a HAS + operator which is roughly synonymous with equality). {field} + can refer to a proto or JSON field, or a synthetic field. + Field names can be camelCase or snake_case. + + Examples: + + - Filter by name: name = + "projects/foo/locations/us-central1/deployments/bar + + - Filter by labels: + + - Resources that have a key called 'foo' labels.foo:\* + - Resources that have a key called 'foo' whose value is + 'bar' labels.foo = bar + + - Filter by state: + + - Deployments in CREATING state. state=CREATING + order_by (str): + Field to use to sort the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeploymentsResponse(proto.Message): + r""" + + Attributes: + deployments (MutableSequence[google.cloud.config_v1.types.Deployment]): + List of [Deployment][google.cloud.config.v1.Deployment]s. + next_page_token (str): + Token to be supplied to the next ListDeployments request via + ``page_token`` to obtain the next set of results. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + deployments: MutableSequence["Deployment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Deployment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDeploymentRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The name of the deployment. 
Format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListRevisionsRequest(proto.Message): + r"""A request to list Revisions passed to a 'ListRevisions' call. + + Attributes: + parent (str): + Required. The parent in whose context the Revisions are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + page_size (int): + When requesting a page of resources, ``page_size`` specifies + number of resources to return. If unspecified or set to 0, + all resources will be returned. + page_token (str): + Token returned by previous call to + 'ListRevisions' which specifies the position in + the list from where to continue listing the + resources. + filter (str): + Lists the Revisions that match the filter expression. A + filter expression filters the resources listed in the + response. The expression must be of the form '{field} + {operator} {value}' where operators: '<', '>', '<=', '>=', + '!=', '=', ':' are supported (colon ':' represents a HAS + operator which is roughly synonymous with equality). {field} + can refer to a proto or JSON field, or a synthetic field. + Field names can be camelCase or snake_case. + + Examples: + + - Filter by name: name = + "projects/foo/locations/us-central1/deployments/dep/revisions/bar + + - Filter by labels: + + - Resources that have a key called 'foo' labels.foo:\* + - Resources that have a key called 'foo' whose value is + 'bar' labels.foo = bar + + - Filter by state: + + - Revisions in CREATING state. state=CREATING + order_by (str): + Field to use to sort the list. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListRevisionsResponse(proto.Message): + r"""A response to a 'ListRevisions' call. Contains a list of + Revisions. + + Attributes: + revisions (MutableSequence[google.cloud.config_v1.types.Revision]): + List of [Revision][google.cloud.config.v1.Revision]s. + next_page_token (str): + A token to request the next page of resources + from the 'ListRevisions' method. The value of an + empty string means that there are no more + resources to return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + revisions: MutableSequence["Revision"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Revision", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetRevisionRequest(proto.Message): + r"""A request to get a Revision from a 'GetRevision' call. + + Attributes: + name (str): + Required. The name of the Revision in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDeploymentRequest(proto.Message): + r""" + + Attributes: + parent (str): + Required. The parent in whose context the Deployment is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + deployment_id (str): + Required. The Deployment ID. + deployment (google.cloud.config_v1.types.Deployment): + Required. [Deployment][google.cloud.config.v1.Deployment] + resource to be created. 
+ request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deployment_id: str = proto.Field( + proto.STRING, + number=2, + ) + deployment: "Deployment" = proto.Field( + proto.MESSAGE, + number=3, + message="Deployment", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeploymentRequest(proto.Message): + r""" + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask used to specify the fields to be + overwritten in the Deployment resource by the update. + + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. + deployment (google.cloud.config_v1.types.Deployment): + Required. [Deployment][google.cloud.config.v1.Deployment] to + update. + + The deployment's ``name`` field is used to identify the + resource to be updated. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}`` + request_id (str): + Optional. An optional request ID to identify + requests. 
Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + deployment: "Deployment" = proto.Field( + proto.MESSAGE, + number=2, + message="Deployment", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteDeploymentRequest(proto.Message): + r""" + + Attributes: + name (str): + Required. The name of the Deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any revisions for + this deployment will also be deleted. + (Otherwise, the request will only work if the + deployment has no revisions.) + delete_policy (google.cloud.config_v1.types.DeleteDeploymentRequest.DeletePolicy): + Optional. Policy on how resources actuated by + the deployment should be deleted. If + unspecified, the default behavior is to delete + the underlying resources. + """ + + class DeletePolicy(proto.Enum): + r"""Policy on how resources actuated by the deployment should be + deleted. + + Values: + DELETE_POLICY_UNSPECIFIED (0): + Unspecified policy, resources will be + deleted. + DELETE (1): + Deletes resources actuated by the deployment. + ABANDON (2): + Abandons resources and only deletes the + deployment and its metadata. + """ + DELETE_POLICY_UNSPECIFIED = 0 + DELETE = 1 + ABANDON = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + delete_policy: DeletePolicy = proto.Field( + proto.ENUM, + number=4, + enum=DeletePolicy, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deployment_metadata (google.cloud.config_v1.types.DeploymentOperationMetadata): + Output only. Metadata about the deployment + operation state. + + This field is a member of `oneof`_ ``resource_metadata``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation finished + running. + target (str): + Output only. 
Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + deployment_metadata: "DeploymentOperationMetadata" = proto.Field( + proto.MESSAGE, + number=8, + oneof="resource_metadata", + message="DeploymentOperationMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Revision(proto.Message): + r"""A child resource of a Deployment generated by a + 'CreateDeployment' or 'UpdateDeployment' call. Each Revision + contains metadata pertaining to a snapshot of a particular + Deployment. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + terraform_blueprint (google.cloud.config_v1.types.TerraformBlueprint): + Output only. A blueprint described using + Terraform's HashiCorp Configuration Language as + a root module. + + This field is a member of `oneof`_ ``blueprint``. + name (str): + Revision name. 
Format: + ``projects/{project}/locations/{location}/deployments/{deployment}/ revisions/{revision}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the revision was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the revision was last + modified. + action (google.cloud.config_v1.types.Revision.Action): + Output only. The action which created this + revision + state (google.cloud.config_v1.types.Revision.State): + Output only. Current state of the revision. + apply_results (google.cloud.config_v1.types.ApplyResults): + Output only. Outputs and artifacts from + applying a deployment. + state_detail (str): + Output only. Additional info regarding the + current state. + error_code (google.cloud.config_v1.types.Revision.ErrorCode): + Output only. Code describing any errors that + may have occurred. + build (str): + Output only. Cloud Build instance UUID + associated with this revision. + logs (str): + Output only. Location of Revision operation logs in + ``gs://{bucket}/{object}`` format. + tf_errors (MutableSequence[google.cloud.config_v1.types.TerraformError]): + Output only. Errors encountered when creating or updating + this deployment. Errors are truncated to 10 entries, see + ``delete_results`` and ``error_logs`` for full details. + error_logs (str): + Output only. Location of Terraform error logs in Google + Cloud Storage. Format: ``gs://{bucket}/{object}``. + service_account (str): + Output only. User-specified Service Account (SA) to be used + as credential to manage resources. Format: + ``projects/{projectID}/serviceAccounts/{serviceAccount}`` + import_existing_resources (bool): + Output only. By default, Infra Manager will + return a failure when Terraform encounters a 409 + code (resource conflict error) during actuation. 
+ If this flag is set to true, Infra Manager will + instead attempt to automatically import the + resource into the Terraform state (for supported + resource types) and continue actuation. + + Not all resource types are supported, refer to + documentation. + worker_pool (str): + Output only. The user-specified Cloud Build worker pool + resource in which the Cloud Build job will execute. Format: + ``projects/{project}/locations/{location}/workerPools/{workerPoolId}``. + If this field is unspecified, the default Cloud Build worker + pool will be used. + """ + + class Action(proto.Enum): + r"""Actions that generate a revision. + + Values: + ACTION_UNSPECIFIED (0): + The default value. This value is used if the + action is omitted. + CREATE (1): + The revision was generated by creating a + deployment. + UPDATE (2): + The revision was generated by updating a + deployment. + DELETE (3): + The revision was deleted. + """ + ACTION_UNSPECIFIED = 0 + CREATE = 1 + UPDATE = 2 + DELETE = 3 + + class State(proto.Enum): + r"""Possible states of a revision. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + APPLYING (1): + The revision is being applied. + APPLIED (2): + The revision was applied successfully. + FAILED (3): + The revision could not be applied + successfully. + """ + STATE_UNSPECIFIED = 0 + APPLYING = 1 + APPLIED = 2 + FAILED = 3 + + class ErrorCode(proto.Enum): + r"""Possible errors if Revision could not be created or updated + successfully. + + Values: + ERROR_CODE_UNSPECIFIED (0): + No error code was specified. + CLOUD_BUILD_PERMISSION_DENIED (1): + Cloud Build failed due to a permission issue. + APPLY_BUILD_API_FAILED (4): + Cloud Build job associated with creating or + updating a deployment could not be started. + APPLY_BUILD_RUN_FAILED (5): + Cloud Build job associated with creating or + updating a deployment was started but failed. 
+ """ + ERROR_CODE_UNSPECIFIED = 0 + CLOUD_BUILD_PERMISSION_DENIED = 1 + APPLY_BUILD_API_FAILED = 4 + APPLY_BUILD_RUN_FAILED = 5 + + terraform_blueprint: "TerraformBlueprint" = proto.Field( + proto.MESSAGE, + number=6, + oneof="blueprint", + message="TerraformBlueprint", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + action: Action = proto.Field( + proto.ENUM, + number=4, + enum=Action, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + apply_results: "ApplyResults" = proto.Field( + proto.MESSAGE, + number=7, + message="ApplyResults", + ) + state_detail: str = proto.Field( + proto.STRING, + number=8, + ) + error_code: ErrorCode = proto.Field( + proto.ENUM, + number=9, + enum=ErrorCode, + ) + build: str = proto.Field( + proto.STRING, + number=10, + ) + logs: str = proto.Field( + proto.STRING, + number=11, + ) + tf_errors: MutableSequence["TerraformError"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="TerraformError", + ) + error_logs: str = proto.Field( + proto.STRING, + number=13, + ) + service_account: str = proto.Field( + proto.STRING, + number=14, + ) + import_existing_resources: bool = proto.Field( + proto.BOOL, + number=15, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=17, + ) + + +class TerraformError(proto.Message): + r"""Errors encountered during actuation using Terraform + + Attributes: + resource_address (str): + Address of the resource associated with the error, e.g. + ``google_compute_network.vpc_network``. + http_response_code (int): + HTTP response code returned from Google Cloud + Platform APIs when Terraform fails to provision + the resource. If unset or 0, no HTTP response + code was returned by Terraform. 
+ error_description (str): + A human-readable error description. + error (google.rpc.status_pb2.Status): + Original error response from underlying + Google API, if available. + """ + + resource_address: str = proto.Field( + proto.STRING, + number=1, + ) + http_response_code: int = proto.Field( + proto.INT32, + number=2, + ) + error_description: str = proto.Field( + proto.STRING, + number=3, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +class GitSource(proto.Message): + r"""A set of files in a Git repository. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + repo (str): + Optional. Repository URL. + Example: + 'https://github.com/kubernetes/examples.git' + + This field is a member of `oneof`_ ``_repo``. + directory (str): + Optional. Subdirectory inside the repository. + Example: 'staging/my-package' + + This field is a member of `oneof`_ ``_directory``. + ref (str): + Optional. Git reference (e.g. branch or tag). + + This field is a member of `oneof`_ ``_ref``. + """ + + repo: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + directory: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + ref: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + + +class DeploymentOperationMetadata(proto.Message): + r"""Ephemeral metadata content describing the state of a + deployment operation. + + Attributes: + step (google.cloud.config_v1.types.DeploymentOperationMetadata.DeploymentStep): + The current step the deployment operation is + running. + apply_results (google.cloud.config_v1.types.ApplyResults): + Outputs and artifacts from applying a + deployment. + build (str): + Output only. Cloud Build instance UUID + associated with this operation. + logs (str): + Output only. Location of Deployment operations logs in + ``gs://{bucket}/{object}`` format. 
+ """ + + class DeploymentStep(proto.Enum): + r"""The possible steps a deployment may be running. + + Values: + DEPLOYMENT_STEP_UNSPECIFIED (0): + Unspecified deployment step + PREPARING_STORAGE_BUCKET (1): + Infra Manager is creating a Google Cloud + Storage bucket to store artifacts and metadata + about the deployment and revision + DOWNLOADING_BLUEPRINT (2): + Downloading the blueprint onto the Google + Cloud Storage bucket + RUNNING_TF_INIT (3): + Initializing Terraform using ``terraform init`` + RUNNING_TF_PLAN (4): + Running ``terraform plan`` + RUNNING_TF_APPLY (5): + Actuating resources using Terraform using + ``terraform apply`` + RUNNING_TF_DESTROY (6): + Destroying resources using Terraform using + ``terraform destroy`` + RUNNING_TF_VALIDATE (7): + Validating the uploaded TF state file when + unlocking a deployment + UNLOCKING_DEPLOYMENT (8): + Unlocking a deployment + SUCCEEDED (9): + Operation was successful + FAILED (10): + Operation failed + """ + DEPLOYMENT_STEP_UNSPECIFIED = 0 + PREPARING_STORAGE_BUCKET = 1 + DOWNLOADING_BLUEPRINT = 2 + RUNNING_TF_INIT = 3 + RUNNING_TF_PLAN = 4 + RUNNING_TF_APPLY = 5 + RUNNING_TF_DESTROY = 6 + RUNNING_TF_VALIDATE = 7 + UNLOCKING_DEPLOYMENT = 8 + SUCCEEDED = 9 + FAILED = 10 + + step: DeploymentStep = proto.Field( + proto.ENUM, + number=1, + enum=DeploymentStep, + ) + apply_results: "ApplyResults" = proto.Field( + proto.MESSAGE, + number=2, + message="ApplyResults", + ) + build: str = proto.Field( + proto.STRING, + number=3, + ) + logs: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Resource(proto.Message): + r"""Resource represents a Google Cloud Platform resource actuated + by IM. Resources are child resources of Revisions. + + Attributes: + name (str): + Output only. Resource name. Format: + ``projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}`` + terraform_info (google.cloud.config_v1.types.ResourceTerraformInfo): + Output only. 
Terraform-specific info if this + resource was created using Terraform. + cai_assets (MutableMapping[str, google.cloud.config_v1.types.ResourceCAIInfo]): + Output only. Map of Cloud Asset Inventory + (CAI) type to CAI info (e.g. CAI ID). CAI type + format follows + https://cloud.google.com/asset-inventory/docs/supported-asset-types + intent (google.cloud.config_v1.types.Resource.Intent): + Output only. Intent of the resource. + state (google.cloud.config_v1.types.Resource.State): + Output only. Current state of the resource. + """ + + class Intent(proto.Enum): + r"""Possible intent of the resource. + + Values: + INTENT_UNSPECIFIED (0): + The default value. This value is used if the + intent is omitted. + CREATE (1): + Infra Manager will create this Resource. + UPDATE (2): + Infra Manager will update this Resource. + DELETE (3): + Infra Manager will delete this Resource. + RECREATE (4): + Infra Manager will destroy and recreate this + Resource. + UNCHANGED (5): + Infra Manager will leave this Resource + untouched. + """ + INTENT_UNSPECIFIED = 0 + CREATE = 1 + UPDATE = 2 + DELETE = 3 + RECREATE = 4 + UNCHANGED = 5 + + class State(proto.Enum): + r"""Possible states of a resource. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + PLANNED (1): + Resource has been planned for reconcile. + IN_PROGRESS (2): + Resource is actively reconciling into the + intended state. + RECONCILED (3): + Resource has reconciled to intended state. + FAILED (4): + Resource failed to reconcile. 
+ """ + STATE_UNSPECIFIED = 0 + PLANNED = 1 + IN_PROGRESS = 2 + RECONCILED = 3 + FAILED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + terraform_info: "ResourceTerraformInfo" = proto.Field( + proto.MESSAGE, + number=2, + message="ResourceTerraformInfo", + ) + cai_assets: MutableMapping[str, "ResourceCAIInfo"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="ResourceCAIInfo", + ) + intent: Intent = proto.Field( + proto.ENUM, + number=4, + enum=Intent, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + + +class ResourceTerraformInfo(proto.Message): + r"""Terraform info of a Resource. + + Attributes: + address (str): + TF resource address that uniquely identifies + this resource within this deployment. + type_ (str): + TF resource type + id (str): + ID attribute of the TF resource + """ + + address: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ResourceCAIInfo(proto.Message): + r"""CAI info of a Resource. + + Attributes: + full_resource_name (str): + CAI resource name in the format following + https://cloud.google.com/apis/design/resource_names#full_resource_name + """ + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetResourceRequest(proto.Message): + r"""A request to get a Resource from a 'GetResource' call. + + Attributes: + name (str): + Required. The name of the Resource in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}/resource/{resource}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListResourcesRequest(proto.Message): + r"""A request to list Resources passed to a 'ListResources' call. + + Attributes: + parent (str): + Required. The parent in whose context the Resources are + listed. 
The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + page_size (int): + When requesting a page of resources, 'page_size' specifies + number of resources to return. If unspecified or set to 0, + all resources will be returned. + page_token (str): + Token returned by previous call to + 'ListResources' which specifies the position in + the list from where to continue listing the + resources. + filter (str): + Lists the Resources that match the filter expression. A + filter expression filters the resources listed in the + response. The expression must be of the form '{field} + {operator} {value}' where operators: '<', '>', '<=', '>=', + '!=', '=', ':' are supported (colon ':' represents a HAS + operator which is roughly synonymous with equality). {field} + can refer to a proto or JSON field, or a synthetic field. + Field names can be camelCase or snake_case. + + Examples: + + - Filter by name: name = + "projects/foo/locations/us-central1/deployments/dep/revisions/bar/resources/baz + order_by (str): + Field to use to sort the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListResourcesResponse(proto.Message): + r"""A response to a 'ListResources' call. Contains a list of + Resources. + + Attributes: + resources (MutableSequence[google.cloud.config_v1.types.Resource]): + List of [Resources][]s. + next_page_token (str): + A token to request the next page of resources + from the 'ListResources' method. The value of an + empty string means that there are no more + resources to return. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + resources: MutableSequence["Resource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Resource", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class Statefile(proto.Message): + r"""Contains info about a Terraform state file + + Attributes: + signed_uri (str): + Output only. Cloud Storage signed URI used + for downloading or uploading the state file. + """ + + signed_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportDeploymentStatefileRequest(proto.Message): + r"""A request to export a state file passed to a + 'ExportDeploymentStatefile' call. + + Attributes: + parent (str): + Required. The parent in whose context the statefile is + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + draft (bool): + Optional. If this flag is set to true, the + exported deployment state file will be the draft + state. This will enable the draft file to be + validated before copying it over to the working + state on unlock. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + draft: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ExportRevisionStatefileRequest(proto.Message): + r"""A request to export a state file passed to a + 'ExportRevisionStatefile' call. + + Attributes: + parent (str): + Required. The parent in whose context the statefile is + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}/revisions/{revision}'. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportStatefileRequest(proto.Message): + r"""A request to import a state file passed to a + 'ImportStatefile' call. + + Attributes: + parent (str): + Required. 
The parent in whose context the statefile is + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + lock_id (int): + Required. Lock ID of the lock file to verify + that the user who is importing the state file + previously locked the Deployment. + skip_draft (bool): + Optional. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + lock_id: int = proto.Field( + proto.INT64, + number=2, + ) + skip_draft: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteStatefileRequest(proto.Message): + r"""A request to delete a state file passed to a + 'DeleteStatefile' call. + + Attributes: + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + lock_id (int): + Required. Lock ID of the lock file to verify + that the user who is deleting the state file + previously locked the Deployment. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + lock_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +class LockDeploymentRequest(proto.Message): + r"""A request to lock a deployment passed to a 'LockDeployment' + call. + + Attributes: + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnlockDeploymentRequest(proto.Message): + r"""A request to unlock a state file passed to a + 'UnlockDeployment' call. + + Attributes: + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + lock_id (int): + Required. Lock ID of the lock file to be + unlocked. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + lock_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +class ExportLockInfoRequest(proto.Message): + r"""A request to get a state file lock info passed to a + 'ExportLockInfo' call. + + Attributes: + name (str): + Required. The name of the deployment in the format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LockInfo(proto.Message): + r"""Details about the lock which locked the deployment. + + Attributes: + lock_id (int): + Unique ID for the lock to be overridden with + generation ID in the backend. + operation (str): + Terraform operation, provided by the caller. + info (str): + Extra information to store with the lock, + provided by the caller. + who (str): + user@hostname when available + version (str): + Terraform version + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time that the lock was taken. 
+ """ + + lock_id: int = proto.Field( + proto.INT64, + number=1, + ) + operation: str = proto.Field( + proto.STRING, + number=2, + ) + info: str = proto.Field( + proto.STRING, + number=3, + ) + who: str = proto.Field( + proto.STRING, + number=4, + ) + version: str = proto.Field( + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-config/mypy.ini b/packages/google-cloud-config/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-config/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-config/noxfile.py b/packages/google-cloud-config/noxfile.py new file mode 100644 index 000000000000..9a2acd8b6787 --- /dev/null +++ b/packages/google-cloud-config/noxfile.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + 
session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. 
+ if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_async.py new file mode 100644 index 000000000000..9f5a6f8d4515 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreateDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_create_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreateDeploymentRequest( + parent="parent_value", + deployment_id="deployment_id_value", + deployment=deployment, + ) + + # Make the request + operation = client.create_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_CreateDeployment_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_sync.py new file mode 100644 index 000000000000..4ff94956cc0b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreateDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_create_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreateDeploymentRequest( + parent="parent_value", + deployment_id="deployment_id_value", + deployment=deployment, + ) + + # Make the request + operation = client.create_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_CreateDeployment_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_async.py new file mode 100644 index 000000000000..866144f716af --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_async.py @@ 
-0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeleteDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_delete_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_DeleteDeployment_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_sync.py new file mode 100644 index 000000000000..198ecc75583f --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeleteDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_delete_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_DeleteDeployment_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_statefile_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_statefile_async.py new file mode 100644 index 000000000000..9b29aaeda1cc --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_statefile_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeleteStatefile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_delete_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeleteStatefileRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + await client.delete_statefile(request=request) + + +# [END config_v1_generated_Config_DeleteStatefile_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_statefile_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_statefile_sync.py new file mode 100644 index 000000000000..0e0bff321bfd --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_statefile_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeleteStatefile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_delete_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeleteStatefileRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + client.delete_statefile(request=request) + + +# [END config_v1_generated_Config_DeleteStatefile_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_deployment_statefile_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_deployment_statefile_async.py new file mode 100644 index 000000000000..9ddc0b71e08b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_deployment_statefile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportDeploymentStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportDeploymentStatefile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_export_deployment_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportDeploymentStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_deployment_statefile(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportDeploymentStatefile_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_deployment_statefile_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_deployment_statefile_sync.py new file mode 100644 index 000000000000..de50f1469e96 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_deployment_statefile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportDeploymentStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportDeploymentStatefile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_export_deployment_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportDeploymentStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_deployment_statefile(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportDeploymentStatefile_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_lock_info_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_lock_info_async.py new file mode 100644 index 000000000000..618eccf6031e --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_lock_info_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportLockInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportLockInfo_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_export_lock_info(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportLockInfoRequest( + name="name_value", + ) + + # Make the request + response = await client.export_lock_info(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportLockInfo_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_lock_info_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_lock_info_sync.py new file mode 100644 index 000000000000..9dc042354cbb --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_lock_info_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportLockInfo +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportLockInfo_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_export_lock_info(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportLockInfoRequest( + name="name_value", + ) + + # Make the request + response = client.export_lock_info(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportLockInfo_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_revision_statefile_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_revision_statefile_async.py new file mode 100644 index 000000000000..cb5c02d7d650 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_revision_statefile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportRevisionStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportRevisionStatefile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_export_revision_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportRevisionStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_revision_statefile(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportRevisionStatefile_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_revision_statefile_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_revision_statefile_sync.py new file mode 100644 index 000000000000..c101b8f3554f --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_revision_statefile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportRevisionStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportRevisionStatefile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_export_revision_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportRevisionStatefileRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_revision_statefile(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportRevisionStatefile_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_async.py new file mode 100644 index 000000000000..12d9da8c4d41 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetDeployment_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_sync.py new file mode 100644 index 000000000000..29877f5621c2 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetDeployment_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_resource_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_resource_async.py new file mode 100644 index 000000000000..5af489c3823a --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_resource_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetResource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetResource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_resource(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetResourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_resource(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetResource_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_resource_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_resource_sync.py new file mode 100644 index 000000000000..c010cc9b97b3 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_resource_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetResource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetResource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_resource(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetResourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_resource(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetResource_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_revision_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_revision_async.py new file mode 100644 index 000000000000..cbbbedb33498 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_revision_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetRevision_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_revision(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_revision(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetRevision_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_revision_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_revision_sync.py new file mode 100644 index 000000000000..95ef695541a5 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_revision_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetRevision_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_revision(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.get_revision(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetRevision_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_import_statefile_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_import_statefile_async.py new file mode 100644 index 000000000000..93d7e21bed32 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_import_statefile_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ImportStatefile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_import_statefile(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ImportStatefileRequest( + parent="parent_value", + lock_id=725, + ) + + # Make the request + response = await client.import_statefile(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ImportStatefile_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_import_statefile_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_import_statefile_sync.py new file mode 100644 index 000000000000..4fdbe2b26933 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_import_statefile_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportStatefile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ImportStatefile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_import_statefile(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ImportStatefileRequest( + parent="parent_value", + lock_id=725, + ) + + # Make the request + response = client.import_statefile(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ImportStatefile_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployments_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployments_async.py new file mode 100644 index 000000000000..0e610caebe93 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListDeployments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_deployments(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListDeployments_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployments_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployments_sync.py new file mode 100644 index 000000000000..dac0a9c6e11e --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeployments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListDeployments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_deployments(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListDeployments_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_resources_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_resources_async.py new file mode 100644 index 000000000000..4ce4fa8fe6fd --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_resources_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListResources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_resources(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resources(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListResources_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_resources_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_resources_sync.py new file mode 100644 index 000000000000..620e9bf97485 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_resources_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_resources(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListResourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_resources(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListResources_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_revisions_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_revisions_async.py new file mode 100644 index 000000000000..0403d42efd50 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_revisions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_revisions(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListRevisions_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_revisions_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_revisions_sync.py new file mode 100644 index 000000000000..42746c70dd18 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_revisions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_revisions(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListRevisions_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_lock_deployment_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_lock_deployment_async.py new file mode 100644 index 000000000000..4bad83dff4e0 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_lock_deployment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LockDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_LockDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_lock_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.LockDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.lock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_LockDeployment_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_lock_deployment_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_lock_deployment_sync.py new file mode 100644 index 000000000000..d706b4f4fe8c --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_lock_deployment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LockDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_LockDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_lock_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.LockDeploymentRequest( + name="name_value", + ) + + # Make the request + operation = client.lock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_LockDeployment_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_unlock_deployment_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_unlock_deployment_async.py new file mode 100644 index 000000000000..f0ec2cf6aa7d --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_unlock_deployment_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UnlockDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UnlockDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_unlock_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.UnlockDeploymentRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + operation = client.unlock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_UnlockDeployment_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_unlock_deployment_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_unlock_deployment_sync.py new file mode 100644 index 000000000000..a0e78638e712 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_unlock_deployment_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UnlockDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UnlockDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_unlock_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.UnlockDeploymentRequest( + name="name_value", + lock_id=725, + ) + + # Make the request + operation = client.unlock_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_UnlockDeployment_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_async.py new file mode 100644 index 000000000000..7d705d83d6b9 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UpdateDeployment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_update_deployment(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + operation = client.update_deployment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_UpdateDeployment_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_sync.py new file mode 100644 index 000000000000..3ae193bf307c --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeployment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UpdateDeployment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_update_deployment(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + deployment = config_v1.Deployment() + deployment.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.UpdateDeploymentRequest( + deployment=deployment, + ) + + # Make the request + operation = client.update_deployment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_UpdateDeployment_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json new file mode 100644 index 000000000000..76af7d2fce65 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -0,0 +1,2609 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.config.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-config", + "version": "0.1.1" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.create_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.CreateDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreateDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreateDeploymentRequest" + }, + { + "name": "parent", + 
"type": "str" + }, + { + "name": "deployment", + "type": "google.cloud.config_v1.types.Deployment" + }, + { + "name": "deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deployment" + }, + "description": "Sample for CreateDeployment", + "file": "config_v1_generated_config_create_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreateDeployment_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.create_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.CreateDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreateDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreateDeploymentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deployment", + "type": "google.cloud.config_v1.types.Deployment" + }, + { + "name": "deployment_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + 
}, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deployment" + }, + "description": "Sample for CreateDeployment", + "file": "config_v1_generated_config_create_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreateDeployment_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.DeleteDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeleteDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeleteDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_deployment" + }, + "description": "Sample for DeleteDeployment", + "file": "config_v1_generated_config_delete_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"config_v1_generated_Config_DeleteDeployment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.delete_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.DeleteDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeleteDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeleteDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_deployment" + }, + "description": "Sample for DeleteDeployment", + "file": "config_v1_generated_config_delete_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeleteDeployment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeleteStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_statefile" + }, + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeleteStatefile_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": 
"Config" + }, + "shortName": "DeleteStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_statefile" + }, + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": 
"export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_deployment_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_deployment_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_lock_info", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"config_v1_generated_config_export_lock_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "service": { + "fullName": 
"google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportRevisionStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" + }, + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_revision_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_revision_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportRevisionStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" + }, + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_revision_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.GetDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" + }, + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetDeployment_async", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.GetDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" + }, + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetDeployment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"config_v1_generated_config_get_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource", + "method": { + "fullName": "google.cloud.config.v1.Config.GetResource", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetResource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetResourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Resource", + "shortName": "get_resource" + }, + "description": "Sample for GetResource", + "file": "config_v1_generated_config_get_resource_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetResource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_resource_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.get_resource", + "method": { + "fullName": "google.cloud.config.v1.Config.GetResource", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + 
}, + "shortName": "GetResource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetResourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Resource", + "shortName": "get_resource" + }, + "description": "Sample for GetResource", + "file": "config_v1_generated_config_get_resource_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetResource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_resource_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_revision", + "method": { + "fullName": "google.cloud.config.v1.Config.GetRevision", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetRevision" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetRevisionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Revision", 
+ "shortName": "get_revision" + }, + "description": "Sample for GetRevision", + "file": "config_v1_generated_config_get_revision_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetRevision_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_revision_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.get_revision", + "method": { + "fullName": "google.cloud.config.v1.Config.GetRevision", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "GetRevision" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.GetRevisionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Revision", + "shortName": "get_revision" + }, + "description": "Sample for GetRevision", + "file": "config_v1_generated_config_get_revision_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_GetRevision_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_get_revision_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.import_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ImportStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ImportStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ImportStatefileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lock_id", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "import_statefile" + }, + "description": "Sample for ImportStatefile", + "file": "config_v1_generated_config_import_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ImportStatefile_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_import_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.import_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ImportStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ImportStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ImportStatefileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lock_id", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "import_statefile" + }, + "description": "Sample for ImportStatefile", + "file": "config_v1_generated_config_import_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ImportStatefile_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_import_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_deployments", + "method": { + "fullName": "google.cloud.config.v1.Config.ListDeployments", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListDeployments" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListDeploymentsAsyncPager", + "shortName": "list_deployments" + }, + "description": "Sample for ListDeployments", + "file": "config_v1_generated_config_list_deployments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListDeployments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_deployments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.list_deployments", + "method": { + "fullName": "google.cloud.config.v1.Config.ListDeployments", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListDeployments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListDeploymentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.config_v1.services.config.pagers.ListDeploymentsPager", + "shortName": "list_deployments" + }, + "description": "Sample for ListDeployments", + "file": "config_v1_generated_config_list_deployments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListDeployments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_deployments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_resources", + "method": { + "fullName": "google.cloud.config.v1.Config.ListResources", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListResourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListResourcesAsyncPager", + "shortName": "list_resources" + }, + "description": "Sample for ListResources", + "file": "config_v1_generated_config_list_resources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListResources_async", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_resources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.list_resources", + "method": { + "fullName": "google.cloud.config.v1.Config.ListResources", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListResources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListResourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListResourcesPager", + "shortName": "list_resources" + }, + "description": "Sample for ListResources", + "file": "config_v1_generated_config_list_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListResources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"config_v1_generated_config_list_resources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_revisions", + "method": { + "fullName": "google.cloud.config.v1.Config.ListRevisions", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListRevisionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListRevisionsAsyncPager", + "shortName": "list_revisions" + }, + "description": "Sample for ListRevisions", + "file": "config_v1_generated_config_list_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListRevisions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.list_revisions", + "method": { + "fullName": "google.cloud.config.v1.Config.ListRevisions", + "service": { + "fullName": 
"google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListRevisionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListRevisionsPager", + "shortName": "list_revisions" + }, + "description": "Sample for ListRevisions", + "file": "config_v1_generated_config_list_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListRevisions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.lock_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.LockDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "LockDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.LockDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "lock_deployment" + }, + "description": "Sample for LockDeployment", + "file": "config_v1_generated_config_lock_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_LockDeployment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_lock_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.lock_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.LockDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "LockDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.LockDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "lock_deployment" + }, + "description": "Sample for LockDeployment", + "file": "config_v1_generated_config_lock_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_LockDeployment_sync", + "segments": [ + { + "end": 55, + "start": 
27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_lock_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.unlock_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.UnlockDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UnlockDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UnlockDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "lock_id", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "unlock_deployment" + }, + "description": "Sample for UnlockDeployment", + "file": "config_v1_generated_config_unlock_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UnlockDeployment_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_unlock_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.unlock_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.UnlockDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UnlockDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UnlockDeploymentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "lock_id", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "unlock_deployment" + }, + "description": "Sample for UnlockDeployment", + "file": "config_v1_generated_config_unlock_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UnlockDeployment_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_unlock_deployment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.update_deployment", + "method": { + 
"fullName": "google.cloud.config.v1.Config.UpdateDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UpdateDeployment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UpdateDeploymentRequest" + }, + { + "name": "deployment", + "type": "google.cloud.config_v1.types.Deployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_deployment" + }, + "description": "Sample for UpdateDeployment", + "file": "config_v1_generated_config_update_deployment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UpdateDeployment_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_update_deployment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.update_deployment", + "method": { + "fullName": "google.cloud.config.v1.Config.UpdateDeployment", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UpdateDeployment" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.config_v1.types.UpdateDeploymentRequest" + }, + { + "name": "deployment", + "type": "google.cloud.config_v1.types.Deployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_deployment" + }, + "description": "Sample for UpdateDeployment", + "file": "config_v1_generated_config_update_deployment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UpdateDeployment_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_update_deployment_sync.py" + } + ] +} diff --git a/packages/google-cloud-config/scripts/decrypt-secrets.sh b/packages/google-cloud-config/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-config/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py b/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py new file mode 100644 index 000000000000..8d7b0d1d739f --- /dev/null +++ b/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py @@ -0,0 +1,191 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class configCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_deployment': ('parent', 'deployment_id', 'deployment', 'request_id', ), + 'delete_deployment': ('name', 'request_id', 'force', 'delete_policy', ), + 'delete_statefile': ('name', 'lock_id', ), + 'export_deployment_statefile': ('parent', 'draft', ), + 'export_lock_info': ('name', ), + 'export_revision_statefile': ('parent', ), + 'get_deployment': ('name', ), + 'get_resource': ('name', ), + 'get_revision': ('name', ), + 'import_statefile': ('parent', 'lock_id', 'skip_draft', ), + 'list_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_resources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_revisions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 
'lock_deployment': ('name', ), + 'unlock_deployment': ('name', 'lock_id', ), + 'update_deployment': ('deployment', 'update_mask', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=configCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the config client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-config/setup.py b/packages/google-cloud-config/setup.py new file mode 100644 index 000000000000..a796b4fbb5b3 --- /dev/null +++ b/packages/google-cloud-config/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-config" + + +description = "Google Cloud Config API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/config/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 
3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-config/testing/.gitignore b/packages/google-cloud-config/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-config/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-config/testing/constraints-3.10.txt b/packages/google-cloud-config/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-config/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-config/testing/constraints-3.11.txt b/packages/google-cloud-config/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-config/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-config/testing/constraints-3.12.txt b/packages/google-cloud-config/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-config/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-config/testing/constraints-3.7.txt b/packages/google-cloud-config/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-config/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-config/testing/constraints-3.8.txt b/packages/google-cloud-config/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-config/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-config/testing/constraints-3.9.txt b/packages/google-cloud-config/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-config/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-config/tests/__init__.py b/packages/google-cloud-config/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-config/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-config/tests/unit/__init__.py b/packages/google-cloud-config/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-config/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-config/tests/unit/gapic/__init__.py b/packages/google-cloud-config/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-config/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/__init__.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py new file mode 100644 index 000000000000..e3cb3a085024 --- /dev/null +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -0,0 +1,12618 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf 
import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.config_v1.services.config import ( + ConfigAsyncClient, + ConfigClient, + pagers, + transports, +) +from google.cloud.config_v1.types import config + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConfigClient._get_default_mtls_endpoint(None) is None + assert ConfigClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + ConfigClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + ConfigClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfigClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ConfigClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConfigClient, "grpc"), + (ConfigAsyncClient, "grpc_asyncio"), + (ConfigClient, "rest"), + ], +) +def test_config_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "config.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://config.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConfigGrpcTransport, "grpc"), + (transports.ConfigGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ConfigRestTransport, "rest"), + ], +) +def 
test_config_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConfigClient, "grpc"), + (ConfigAsyncClient, "grpc_asyncio"), + (ConfigClient, "rest"), + ], +) +def test_config_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "config.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://config.googleapis.com" + ) + + +def test_config_client_get_transport_class(): + transport = ConfigClient.get_transport_class() + available_transports = [ + transports.ConfigGrpcTransport, + transports.ConfigRestTransport, + ] + assert transport in available_transports + + transport = ConfigClient.get_transport_class("grpc") + assert transport == transports.ConfigGrpcTransport + + 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigClient, transports.ConfigGrpcTransport, "grpc"), + (ConfigAsyncClient, transports.ConfigGrpcAsyncIOTransport, "grpc_asyncio"), + (ConfigClient, transports.ConfigRestTransport, "rest"), + ], +) +@mock.patch.object( + ConfigClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigClient) +) +@mock.patch.object( + ConfigAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigAsyncClient) +) +def test_config_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ConfigClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ConfigClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ConfigClient, transports.ConfigGrpcTransport, "grpc", "true"), + ( + ConfigAsyncClient, + transports.ConfigGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ConfigClient, transports.ConfigGrpcTransport, "grpc", "false"), + ( + ConfigAsyncClient, + transports.ConfigGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ConfigClient, transports.ConfigRestTransport, "rest", "true"), + (ConfigClient, transports.ConfigRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + 
ConfigClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigClient) +) +@mock.patch.object( + ConfigAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigAsyncClient) +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_config_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ConfigClient, ConfigAsyncClient]) +@mock.patch.object( + ConfigClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigClient) +) +@mock.patch.object( + ConfigAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigAsyncClient) +) +def test_config_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigClient, transports.ConfigGrpcTransport, "grpc"), + (ConfigAsyncClient, transports.ConfigGrpcAsyncIOTransport, "grpc_asyncio"), + (ConfigClient, transports.ConfigRestTransport, "rest"), + ], +) +def test_config_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (ConfigClient, transports.ConfigGrpcTransport, "grpc", grpc_helpers), + ( + ConfigAsyncClient, + transports.ConfigGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ConfigClient, transports.ConfigRestTransport, "rest", None), + ], +) +def test_config_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_config_client_client_options_from_dict(): + with mock.patch( + "google.cloud.config_v1.services.config.transports.ConfigGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ConfigClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (ConfigClient, transports.ConfigGrpcTransport, "grpc", grpc_helpers), + ( + ConfigAsyncClient, + transports.ConfigGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_config_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "config.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="config.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListDeploymentsRequest, + dict, + ], +) +def test_list_deployments(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListDeploymentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_deployments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + client.list_deployments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListDeploymentsRequest() + + +@pytest.mark.asyncio +async def test_list_deployments_async( + transport: str = "grpc_asyncio", request_type=config.ListDeploymentsRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListDeploymentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_deployments_async_from_dict(): + await test_list_deployments_async(request_type=dict) + + +def test_list_deployments_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + call.return_value = config.ListDeploymentsResponse() + client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_deployments_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentsResponse() + ) + await client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_deployments_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_deployments_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_deployments_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_deployments_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_pager(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_deployments(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Deployment) for i in results) + + +def test_list_deployments_pages(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deployments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_deployments_async_pager(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deployments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, config.Deployment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_deployments_async_pages(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_deployments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = config.Deployment( + name="name_value", + state=config.Deployment.State.CREATING, + latest_revision="latest_revision_value", + state_detail="state_detail_value", + error_code=config.Deployment.ErrorCode.REVISION_FAILED, + delete_build="delete_build_value", + delete_logs="delete_logs_value", + error_logs="error_logs_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + lock_state=config.Deployment.LockState.LOCKED, + ) + response = client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Deployment) + assert response.name == "name_value" + assert response.state == config.Deployment.State.CREATING + assert response.latest_revision == "latest_revision_value" + assert response.state_detail == "state_detail_value" + assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED + assert response.delete_build == "delete_build_value" + assert response.delete_logs == "delete_logs_value" + assert response.error_logs == "error_logs_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + assert response.lock_state == config.Deployment.LockState.LOCKED + + +def test_get_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + client.get_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetDeploymentRequest() + + +@pytest.mark.asyncio +async def test_get_deployment_async( + transport: str = "grpc_asyncio", request_type=config.GetDeploymentRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Deployment( + name="name_value", + state=config.Deployment.State.CREATING, + latest_revision="latest_revision_value", + state_detail="state_detail_value", + error_code=config.Deployment.ErrorCode.REVISION_FAILED, + delete_build="delete_build_value", + delete_logs="delete_logs_value", + error_logs="error_logs_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + lock_state=config.Deployment.LockState.LOCKED, + ) + ) + response = await client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetDeploymentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Deployment) + assert response.name == "name_value" + assert response.state == config.Deployment.State.CREATING + assert response.latest_revision == "latest_revision_value" + assert response.state_detail == "state_detail_value" + assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED + assert response.delete_build == "delete_build_value" + assert response.delete_logs == "delete_logs_value" + assert response.error_logs == "error_logs_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + assert response.lock_state == config.Deployment.LockState.LOCKED + + +@pytest.mark.asyncio +async def test_get_deployment_async_from_dict(): + await test_get_deployment_async(request_type=dict) + + +def test_get_deployment_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + call.return_value = config.Deployment() + client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_deployment_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Deployment()) + await client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_deployment_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Deployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_deployment_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment( + config.GetDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_deployment_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Deployment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Deployment()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_deployment_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_deployment( + config.GetDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreateDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + client.create_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreateDeploymentRequest() + + +@pytest.mark.asyncio +async def test_create_deployment_async( + transport: str = "grpc_asyncio", request_type=config.CreateDeploymentRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreateDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_deployment_async_from_dict(): + await test_create_deployment_async(request_type=dict) + + +def test_create_deployment_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreateDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_deployment_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreateDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_deployment_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deployment( + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment + mock_val = config.Deployment( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + arg = args[0].deployment_id + mock_val = "deployment_id_value" + assert arg == mock_val + + +def test_create_deployment_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_deployment_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deployment( + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment + mock_val = config.Deployment( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + arg = args[0].deployment_id + mock_val = "deployment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_deployment_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateDeploymentRequest, + dict, + ], +) +def test_update_deployment(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.UpdateDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + client.update_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.UpdateDeploymentRequest() + + +@pytest.mark.asyncio +async def test_update_deployment_async( + transport: str = "grpc_asyncio", request_type=config.UpdateDeploymentRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.UpdateDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_deployment_async_from_dict(): + await test_update_deployment_async(request_type=dict) + + +def test_update_deployment_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UpdateDeploymentRequest() + + request.deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_deployment_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UpdateDeploymentRequest() + + request.deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment.name=name_value", + ) in kw["metadata"] + + +def test_update_deployment_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deployment( + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].deployment + mock_val = config.Deployment( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_deployment_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_deployment_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deployment( + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].deployment + mock_val = config.Deployment( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_deployment_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + client.delete_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteDeploymentRequest() + + +@pytest.mark.asyncio +async def test_delete_deployment_async( + transport: str = "grpc_asyncio", request_type=config.DeleteDeploymentRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_deployment_async_from_dict(): + await test_delete_deployment_async(request_type=dict) + + +def test_delete_deployment_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeleteDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_deployment_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeleteDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_deployment_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_deployment_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deployment( + config.DeleteDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_deployment_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_deployment_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deployment( + config.DeleteDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListRevisionsRequest, + dict, + ], +) +def test_list_revisions(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = config.ListRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + client.list_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListRevisionsRequest() + + +@pytest.mark.asyncio +async def test_list_revisions_async( + transport: str = "grpc_asyncio", request_type=config.ListRevisionsRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_revisions_async_from_dict(): + await test_list_revisions_async(request_type=dict) + + +def test_list_revisions_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListRevisionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + call.return_value = config.ListRevisionsResponse() + client.list_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_revisions_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.ListRevisionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListRevisionsResponse() + ) + await client.list_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_revisions_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListRevisionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_revisions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_revisions_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_revisions( + config.ListRevisionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_revisions_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListRevisionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListRevisionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_revisions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_revisions_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_revisions( + config.ListRevisionsRequest(), + parent="parent_value", + ) + + +def test_list_revisions_pager(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_revisions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Revision) for i in results) + + +def test_list_revisions_pages(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + RuntimeError, + ) + pages = list(client.list_revisions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_revisions_async_pager(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_revisions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_revisions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, config.Revision) for i in responses) + + +@pytest.mark.asyncio +async def test_list_revisions_async_pages(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_revisions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_revisions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetRevisionRequest, + dict, + ], +) +def test_get_revision(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = config.Revision( + name="name_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, + state_detail="state_detail_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", + error_logs="error_logs_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + ) + response = client.get_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetRevisionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Revision) + assert response.name == "name_value" + assert response.action == config.Revision.Action.CREATE + assert response.state == config.Revision.State.APPLYING + assert response.state_detail == "state_detail_value" + assert ( + response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + ) + assert response.build == "build_value" + assert response.logs == "logs_value" + assert response.error_logs == "error_logs_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + + +def test_get_revision_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + client.get_revision() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetRevisionRequest() + + +@pytest.mark.asyncio +async def test_get_revision_async( + transport: str = "grpc_asyncio", request_type=config.GetRevisionRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Revision( + name="name_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, + state_detail="state_detail_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", + error_logs="error_logs_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + ) + ) + response = await client.get_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetRevisionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Revision) + assert response.name == "name_value" + assert response.action == config.Revision.Action.CREATE + assert response.state == config.Revision.State.APPLYING + assert response.state_detail == "state_detail_value" + assert ( + response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + ) + assert response.build == "build_value" + assert response.logs == "logs_value" + assert response.error_logs == "error_logs_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + + +@pytest.mark.asyncio +async def test_get_revision_async_from_dict(): + await test_get_revision_async(request_type=dict) + + +def test_get_revision_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + call.return_value = config.Revision() + client.get_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_revision_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.GetRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Revision()) + await client.get_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_revision_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Revision() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_revision( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_revision_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_revision( + config.GetRevisionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_revision_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Revision() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Revision()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_revision( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_revision_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_revision( + config.GetRevisionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetResourceRequest, + dict, + ], +) +def test_get_resource(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, + ) + response = client.get_resource(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetResourceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Resource) + assert response.name == "name_value" + assert response.intent == config.Resource.Intent.CREATE + assert response.state == config.Resource.State.PLANNED + + +def test_get_resource_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + client.get_resource() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetResourceRequest() + + +@pytest.mark.asyncio +async def test_get_resource_async( + transport: str = "grpc_asyncio", request_type=config.GetResourceRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, + ) + ) + response = await client.get_resource(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetResourceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Resource) + assert response.name == "name_value" + assert response.intent == config.Resource.Intent.CREATE + assert response.state == config.Resource.State.PLANNED + + +@pytest.mark.asyncio +async def test_get_resource_async_from_dict(): + await test_get_resource_async(request_type=dict) + + +def test_get_resource_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetResourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + call.return_value = config.Resource() + client.get_resource(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_resource_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.GetResourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Resource()) + await client.get_resource(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_resource_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Resource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_resource( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_resource_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_resource( + config.GetResourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_resource_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Resource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Resource()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_resource( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_resource_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_resource( + config.GetResourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListResourcesRequest, + dict, + ], +) +def test_list_resources(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListResourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_resources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + client.list_resources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListResourcesRequest() + + +@pytest.mark.asyncio +async def test_list_resources_async( + transport: str = "grpc_asyncio", request_type=config.ListResourcesRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListResourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListResourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_resources_async_from_dict(): + await test_list_resources_async(request_type=dict) + + +def test_list_resources_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListResourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + call.return_value = config.ListResourcesResponse() + client.list_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_resources_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.ListResourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListResourcesResponse() + ) + await client.list_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_resources_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListResourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_resources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_resources_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_resources( + config.ListResourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_resources_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListResourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListResourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_resources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_resources_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_resources( + config.ListResourcesRequest(), + parent="parent_value", + ) + + +def test_list_resources_pager(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_resources(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Resource) for i in results) + + +def test_list_resources_pages(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_resources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_resources_async_pager(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resources), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_resources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, config.Resource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_resources_async_pages(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resources), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_resources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportDeploymentStatefileRequest, + dict, + ], +) +def test_export_deployment_statefile(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.Statefile( + signed_uri="signed_uri_value", + ) + response = client.export_deployment_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportDeploymentStatefileRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +def test_export_deployment_statefile_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + client.export_deployment_statefile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportDeploymentStatefileRequest() + + +@pytest.mark.asyncio +async def test_export_deployment_statefile_async( + transport: str = "grpc_asyncio", + request_type=config.ExportDeploymentStatefileRequest, +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Statefile( + signed_uri="signed_uri_value", + ) + ) + response = await client.export_deployment_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportDeploymentStatefileRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +@pytest.mark.asyncio +async def test_export_deployment_statefile_async_from_dict(): + await test_export_deployment_statefile_async(request_type=dict) + + +def test_export_deployment_statefile_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportDeploymentStatefileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + call.return_value = config.Statefile() + client.export_deployment_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_deployment_statefile_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportDeploymentStatefileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Statefile()) + await client.export_deployment_statefile(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportRevisionStatefileRequest, + dict, + ], +) +def test_export_revision_statefile(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.Statefile( + signed_uri="signed_uri_value", + ) + response = client.export_revision_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportRevisionStatefileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +def test_export_revision_statefile_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + client.export_revision_statefile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportRevisionStatefileRequest() + + +@pytest.mark.asyncio +async def test_export_revision_statefile_async( + transport: str = "grpc_asyncio", request_type=config.ExportRevisionStatefileRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Statefile( + signed_uri="signed_uri_value", + ) + ) + response = await client.export_revision_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportRevisionStatefileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +@pytest.mark.asyncio +async def test_export_revision_statefile_async_from_dict(): + await test_export_revision_statefile_async(request_type=dict) + + +def test_export_revision_statefile_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.ExportRevisionStatefileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + call.return_value = config.Statefile() + client.export_revision_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_revision_statefile_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportRevisionStatefileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Statefile()) + await client.export_revision_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + config.ImportStatefileRequest, + dict, + ], +) +def test_import_statefile(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Statefile( + signed_uri="signed_uri_value", + ) + response = client.import_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ImportStatefileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +def test_import_statefile_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + client.import_statefile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ImportStatefileRequest() + + +@pytest.mark.asyncio +async def test_import_statefile_async( + transport: str = "grpc_asyncio", request_type=config.ImportStatefileRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Statefile( + signed_uri="signed_uri_value", + ) + ) + response = await client.import_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ImportStatefileRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +@pytest.mark.asyncio +async def test_import_statefile_async_from_dict(): + await test_import_statefile_async(request_type=dict) + + +def test_import_statefile_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ImportStatefileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + call.return_value = config.Statefile() + client.import_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_statefile_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ImportStatefileRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Statefile()) + await client.import_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_import_statefile_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Statefile() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.import_statefile( + parent="parent_value", + lock_id=725, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].lock_id + mock_val = 725 + assert arg == mock_val + + +def test_import_statefile_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_statefile( + config.ImportStatefileRequest(), + parent="parent_value", + lock_id=725, + ) + + +@pytest.mark.asyncio +async def test_import_statefile_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Statefile() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Statefile()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.import_statefile( + parent="parent_value", + lock_id=725, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].lock_id + mock_val = 725 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_import_statefile_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_statefile( + config.ImportStatefileRequest(), + parent="parent_value", + lock_id=725, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteStatefileRequest, + dict, + ], +) +def test_delete_statefile(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteStatefileRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_statefile_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + client.delete_statefile() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteStatefileRequest() + + +@pytest.mark.asyncio +async def test_delete_statefile_async( + transport: str = "grpc_asyncio", request_type=config.DeleteStatefileRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteStatefileRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_statefile_async_from_dict(): + await test_delete_statefile_async(request_type=dict) + + +def test_delete_statefile_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeleteStatefileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + call.return_value = None + client.delete_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_statefile_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeleteStatefileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_statefile_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_statefile( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_statefile_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_statefile( + config.DeleteStatefileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_statefile_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_statefile( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_statefile_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_statefile( + config.DeleteStatefileRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.LockDeploymentRequest, + dict, + ], +) +def test_lock_deployment(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.lock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.LockDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_lock_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + client.lock_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.LockDeploymentRequest() + + +@pytest.mark.asyncio +async def test_lock_deployment_async( + transport: str = "grpc_asyncio", request_type=config.LockDeploymentRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.lock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.LockDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_lock_deployment_async_from_dict(): + await test_lock_deployment_async(request_type=dict) + + +def test_lock_deployment_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.LockDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.lock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_lock_deployment_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.LockDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.lock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_lock_deployment_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.lock_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_lock_deployment_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_lock_deployment_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.lock_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_lock_deployment_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UnlockDeploymentRequest, + dict, + ], +) +def test_unlock_deployment(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.unlock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.UnlockDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_unlock_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + client.unlock_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.UnlockDeploymentRequest() + + +@pytest.mark.asyncio +async def test_unlock_deployment_async( + transport: str = "grpc_asyncio", request_type=config.UnlockDeploymentRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.unlock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.UnlockDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_unlock_deployment_async_from_dict(): + await test_unlock_deployment_async(request_type=dict) + + +def test_unlock_deployment_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UnlockDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.unlock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_unlock_deployment_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UnlockDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.unlock_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_unlock_deployment_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.unlock_deployment( + name="name_value", + lock_id=725, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].lock_id + mock_val = 725 + assert arg == mock_val + + +def test_unlock_deployment_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unlock_deployment( + config.UnlockDeploymentRequest(), + name="name_value", + lock_id=725, + ) + + +@pytest.mark.asyncio +async def test_unlock_deployment_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.unlock_deployment( + name="name_value", + lock_id=725, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].lock_id + mock_val = 725 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_unlock_deployment_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.unlock_deployment( + config.UnlockDeploymentRequest(), + name="name_value", + lock_id=725, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportLockInfoRequest, + dict, + ], +) +def test_export_lock_info(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", + ) + response = client.export_lock_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportLockInfoRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.LockInfo) + assert response.lock_id == 725 + assert response.operation == "operation_value" + assert response.info == "info_value" + assert response.who == "who_value" + assert response.version == "version_value" + + +def test_export_lock_info_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + client.export_lock_info() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportLockInfoRequest() + + +@pytest.mark.asyncio +async def test_export_lock_info_async( + transport: str = "grpc_asyncio", request_type=config.ExportLockInfoRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", + ) + ) + response = await client.export_lock_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportLockInfoRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.LockInfo) + assert response.lock_id == 725 + assert response.operation == "operation_value" + assert response.info == "info_value" + assert response.who == "who_value" + assert response.version == "version_value" + + +@pytest.mark.asyncio +async def test_export_lock_info_async_from_dict(): + await test_export_lock_info_async(request_type=dict) + + +def test_export_lock_info_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportLockInfoRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + call.return_value = config.LockInfo() + client.export_lock_info(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_lock_info_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportLockInfoRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.LockInfo()) + await client.export_lock_info(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_export_lock_info_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.LockInfo() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_lock_info( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_export_lock_info_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_lock_info( + config.ExportLockInfoRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_export_lock_info_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.LockInfo() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.LockInfo()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.export_lock_info( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_export_lock_info_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_lock_info( + config.ExportLockInfoRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListDeploymentsRequest, + dict, + ], +) +def test_list_deployments_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_deployments_rest_required_fields( + request_type=config.ListDeploymentsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.ConfigRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.ListDeploymentsResponse.to_json( + config.ListDeploymentsResponse() + ) + + request = config.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListDeploymentsResponse() + + client.list_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=config.ListDeploymentsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) + + +def test_list_deployments_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Deployment) for i in results) + + pages = list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + 
config.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Deployment( + name="name_value", + state=config.Deployment.State.CREATING, + latest_revision="latest_revision_value", + state_detail="state_detail_value", + error_code=config.Deployment.ErrorCode.REVISION_FAILED, + delete_build="delete_build_value", + delete_logs="delete_logs_value", + error_logs="error_logs_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + lock_state=config.Deployment.LockState.LOCKED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Deployment) + assert response.name == "name_value" + assert response.state == config.Deployment.State.CREATING + assert response.latest_revision == "latest_revision_value" + assert response.state_detail == "state_detail_value" + assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED + assert response.delete_build == "delete_build_value" + assert response.delete_logs == "delete_logs_value" + assert response.error_logs == "error_logs_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + assert response.lock_state == config.Deployment.LockState.LOCKED + + +def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = 
ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Deployment.to_json(config.Deployment()) + + request = config.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Deployment() + + client.get_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=config.GetDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) + + +def test_get_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deployment( + config.GetDeploymentRequest(), + name="name_value", + ) + + +def test_get_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_deployment_rest_required_fields( + request_type=config.CreateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "deploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == request_init["deployment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentId"] = "deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "deployment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == "deployment_id_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_deployment(request) + + expected_params = [ + ( + "deploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deploymentId", + "requestId", + ) + ) + & set( + ( + "parent", + "deploymentId", + "deployment", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=config.CreateDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": 
b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) + + +def test_create_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + +def test_create_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateDeploymentRequest, + dict, + ], +) +def test_update_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "projects/sample1/locations/sample2/deployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": 
"delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_deployment_rest_required_fields( + request_type=config.UpdateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("deployment",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_update_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=config.UpdateDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "projects/sample1/locations/sample2/deployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": 
b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) + + +def test_update_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_deployment_rest_required_fields( + request_type=config.DeleteDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "delete_policy", + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deletePolicy", + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=config.DeleteDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) + + +def test_delete_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deployment( + config.DeleteDeploymentRequest(), + name="name_value", + ) + + +def test_delete_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListRevisionsRequest, + dict, + ], +) +def test_list_revisions_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.ListRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_revisions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListRevisionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.ListRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_revisions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_revisions_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_revisions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_revisions_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.ConfigRestInterceptor, "post_list_revisions" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_revisions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.ListRevisionsResponse.to_json( + config.ListRevisionsResponse() + ) + + request = config.ListRevisionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListRevisionsResponse() + + client.list_revisions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_revisions_rest_bad_request( + transport: str = "rest", request_type=config.ListRevisionsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_revisions(request) + + +def test_list_revisions_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListRevisionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.ListRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_revisions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" + % client.transport._host, + args[1], + ) + + +def test_list_revisions_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_revisions( + config.ListRevisionsRequest(), + parent="parent_value", + ) + + +def test_list_revisions_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } + + pager = client.list_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Revision) for i in results) + + pages = list(client.list_revisions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetRevisionRequest, 
+ dict, + ], +) +def test_get_revision_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Revision( + name="name_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, + state_detail="state_detail_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", + error_logs="error_logs_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Revision.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_revision(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Revision) + assert response.name == "name_value" + assert response.action == config.Revision.Action.CREATE + assert response.state == config.Revision.State.APPLYING + assert response.state_detail == "state_detail_value" + assert ( + response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + ) + assert response.build == "build_value" + assert response.logs == "logs_value" + assert response.error_logs == "error_logs_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + + +def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = config.Revision() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.Revision.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_revision(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_revision_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_revision._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_revision_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.ConfigRestInterceptor, "post_get_revision" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_revision" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Revision.to_json(config.Revision()) + + request = config.GetRevisionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Revision() + + client.get_revision( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_revision_rest_bad_request( + transport: str = "rest", request_type=config.GetRevisionRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_revision(request) + + +def test_get_revision_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Revision() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Revision.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_revision(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_revision_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_revision( + config.GetRevisionRequest(), + name="name_value", + ) + + +def test_get_revision_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetResourceRequest, + dict, + ], +) +def test_get_resource_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Resource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_resource(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Resource) + assert response.name == "name_value" + assert response.intent == config.Resource.Intent.CREATE + assert response.state == config.Resource.State.PLANNED + + +def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_resource._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_resource._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Resource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.Resource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_resource(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_resource_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_resource_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_resource" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Resource.to_json(config.Resource()) + + request = config.GetResourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Resource() + + client.get_resource( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_resource_rest_bad_request( + transport: str = "rest", request_type=config.GetResourceRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_resource(request) + + +def test_get_resource_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.Resource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Resource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_resource(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_resource_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_resource( + config.GetResourceRequest(), + name="name_value", + ) + + +def test_get_resource_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListResourcesRequest, + dict, + ], +) +def test_list_resources_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.ListResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListResourcesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.ListResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_resources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_resources_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_resources_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.ConfigRestInterceptor, "post_list_resources" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_resources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.ListResourcesResponse.to_json( + config.ListResourcesResponse() + ) + + request = config.ListResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListResourcesResponse() + + client.list_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_resources_rest_bad_request( + transport: str = "rest", request_type=config.ListResourcesRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_resources(request) + + +def test_list_resources_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.ListResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" + % client.transport._host, + args[1], + ) + + +def test_list_resources_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_resources( + config.ListResourcesRequest(), + parent="parent_value", + ) + + +def test_list_resources_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListResourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + 
"parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + + pager = client.list_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Resource) for i in results) + + pages = list(client.list_resources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportDeploymentStatefileRequest, + dict, + ], +) +def test_export_deployment_statefile_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_deployment_statefile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +def test_export_deployment_statefile_rest_required_fields( + request_type=config.ExportDeploymentStatefileRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Statefile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_deployment_statefile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_deployment_statefile_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_deployment_statefile_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_export_deployment_statefile" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_export_deployment_statefile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() 
+ pb_message = config.ExportDeploymentStatefileRequest.pb( + config.ExportDeploymentStatefileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Statefile.to_json(config.Statefile()) + + request = config.ExportDeploymentStatefileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Statefile() + + client.export_deployment_statefile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_deployment_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ExportDeploymentStatefileRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_deployment_statefile(request) + + +def test_export_deployment_statefile_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportRevisionStatefileRequest, + dict, + ], +) +def test_export_revision_statefile_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_revision_statefile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +def test_export_revision_statefile_rest_required_fields( + request_type=config.ExportRevisionStatefileRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_revision_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_revision_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Statefile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_revision_statefile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_revision_statefile_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_revision_statefile_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_export_revision_statefile" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_export_revision_statefile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = config.ExportRevisionStatefileRequest.pb( + config.ExportRevisionStatefileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Statefile.to_json(config.Statefile()) + + request = config.ExportRevisionStatefileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Statefile() + + client.export_revision_statefile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_revision_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ExportRevisionStatefileRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_revision_statefile(request) + + +def test_export_revision_statefile_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ImportStatefileRequest, + dict, + ], +) +def test_import_statefile_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_statefile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" + + +def test_import_statefile_rest_required_fields( + request_type=config.ImportStatefileRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["lock_id"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["lockId"] = 725 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Statefile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_statefile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_statefile_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "lockId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_statefile_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_import_statefile" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_import_statefile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Statefile.to_json(config.Statefile()) + + request = config.ImportStatefileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Statefile() + + client.import_statefile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ImportStatefileRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_statefile(request) + + +def test_import_statefile_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.Statefile() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + lock_id=725, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.import_statefile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" + % client.transport._host, + args[1], + ) + + +def test_import_statefile_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.import_statefile( + config.ImportStatefileRequest(), + parent="parent_value", + lock_id=725, + ) + + +def test_import_statefile_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteStatefileRequest, + dict, + ], +) +def test_delete_statefile_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_statefile(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_statefile_rest_required_fields( + request_type=config.DeleteStatefileRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request_init["lock_id"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_statefile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_statefile_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_statefile_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "pre_delete_statefile" + ) as pre: + pre.assert_not_called() + pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = config.DeleteStatefileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_statefile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_statefile_rest_bad_request( + transport: str = "rest", request_type=config.DeleteStatefileRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_statefile(request) + + +def test_delete_statefile_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_statefile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" + % client.transport._host, + args[1], + ) + + +def test_delete_statefile_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_statefile( + config.DeleteStatefileRequest(), + name="name_value", + ) + + +def test_delete_statefile_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.LockDeploymentRequest, + dict, + ], +) +def test_lock_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lock_deployment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_lock_deployment_rest_required_fields( + request_type=config.LockDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lock_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lock_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lock_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lock_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lock_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_lock_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_lock_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.LockDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.lock_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lock_deployment_rest_bad_request( + transport: str = "rest", request_type=config.LockDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lock_deployment(request) + + +def test_lock_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lock_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" + % client.transport._host, + args[1], + ) + + +def test_lock_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", + ) + + +def test_lock_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UnlockDeploymentRequest, + dict, + ], +) +def test_unlock_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.unlock_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_unlock_deployment_rest_required_fields( + request_type=config.UnlockDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request_init["lock_id"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unlock_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).unlock_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.unlock_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_unlock_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.unlock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_unlock_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_unlock_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_unlock_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message 
= config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.UnlockDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.unlock_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_unlock_deployment_rest_bad_request( + transport: str = "rest", request_type=config.UnlockDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unlock_deployment(request) + + +def test_unlock_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + lock_id=725, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.unlock_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" + % client.transport._host, + args[1], + ) + + +def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.unlock_deployment( + config.UnlockDeploymentRequest(), + name="name_value", + lock_id=725, + ) + + +def test_unlock_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportLockInfoRequest, + dict, + ], +) +def test_export_lock_info_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.LockInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_lock_info(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.LockInfo) + assert response.lock_id == 725 + assert response.operation == "operation_value" + assert response.info == "info_value" + assert response.who == "who_value" + assert response.version == "version_value" + + +def test_export_lock_info_rest_required_fields( + request_type=config.ExportLockInfoRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_lock_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_lock_info._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.LockInfo() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = config.LockInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_lock_info(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_lock_info_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_lock_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_lock_info_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_export_lock_info" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_export_lock_info" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.LockInfo.to_json(config.LockInfo()) + + request = config.ExportLockInfoRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.LockInfo() + + client.export_lock_info( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_lock_info_rest_bad_request( + transport: str = "rest", request_type=config.ExportLockInfoRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_lock_info(request) + + +def test_export_lock_info_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.LockInfo() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = config.LockInfo.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_lock_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" + % client.transport._host, + args[1], + ) + + +def test_export_lock_info_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_lock_info( + config.ExportLockInfoRequest(), + name="name_value", + ) + + +def test_export_lock_info_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigGrpcTransport, + transports.ConfigGrpcAsyncIOTransport, + transports.ConfigRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ConfigClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConfigGrpcTransport, + ) + + +def test_config_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConfigTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_config_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.config_v1.services.config.transports.ConfigTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ConfigTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_deployments", + "get_deployment", + "create_deployment", + "update_deployment", + "delete_deployment", + "list_revisions", + "get_revision", + "get_resource", + "list_resources", + "export_deployment_statefile", + "export_revision_statefile", + "import_statefile", + "delete_statefile", + "lock_deployment", + "unlock_deployment", + "export_lock_info", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_config_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.config_v1.services.config.transports.ConfigTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_config_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.config_v1.services.config.transports.ConfigTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigTransport() + adc.assert_called_once() + + +def test_config_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigGrpcTransport, + transports.ConfigGrpcAsyncIOTransport, + ], +) +def test_config_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigGrpcTransport, + transports.ConfigGrpcAsyncIOTransport, + transports.ConfigRestTransport, + ], +) +def test_config_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigGrpcTransport, grpc_helpers), + (transports.ConfigGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_config_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "config.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="config.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ConfigGrpcTransport, transports.ConfigGrpcAsyncIOTransport], +) +def test_config_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_config_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ConfigRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_config_rest_lro_client(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_config_host_no_port(transport_name): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="config.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "config.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://config.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_config_host_with_port(transport_name): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="config.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "config.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://config.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_config_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ConfigClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ConfigClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_deployments._session + session2 = client2.transport.list_deployments._session + assert session1 != session2 + session1 = client1.transport.get_deployment._session + session2 = client2.transport.get_deployment._session + assert session1 != session2 + session1 = client1.transport.create_deployment._session + session2 = client2.transport.create_deployment._session + assert session1 != session2 + session1 = client1.transport.update_deployment._session + session2 = 
client2.transport.update_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_deployment._session + session2 = client2.transport.delete_deployment._session + assert session1 != session2 + session1 = client1.transport.list_revisions._session + session2 = client2.transport.list_revisions._session + assert session1 != session2 + session1 = client1.transport.get_revision._session + session2 = client2.transport.get_revision._session + assert session1 != session2 + session1 = client1.transport.get_resource._session + session2 = client2.transport.get_resource._session + assert session1 != session2 + session1 = client1.transport.list_resources._session + session2 = client2.transport.list_resources._session + assert session1 != session2 + session1 = client1.transport.export_deployment_statefile._session + session2 = client2.transport.export_deployment_statefile._session + assert session1 != session2 + session1 = client1.transport.export_revision_statefile._session + session2 = client2.transport.export_revision_statefile._session + assert session1 != session2 + session1 = client1.transport.import_statefile._session + session2 = client2.transport.import_statefile._session + assert session1 != session2 + session1 = client1.transport.delete_statefile._session + session2 = client2.transport.delete_statefile._session + assert session1 != session2 + session1 = client1.transport.lock_deployment._session + session2 = client2.transport.lock_deployment._session + assert session1 != session2 + session1 = client1.transport.unlock_deployment._session + session2 = client2.transport.unlock_deployment._session + assert session1 != session2 + session1 = client1.transport.export_lock_info._session + session2 = client2.transport.export_lock_info._session + assert session1 != session2 + + +def test_config_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if 
provided. + transport = transports.ConfigGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_config_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConfigGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.ConfigGrpcTransport, transports.ConfigGrpcAsyncIOTransport], +) +def test_config_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + 
credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.ConfigGrpcTransport, transports.ConfigGrpcAsyncIOTransport], +) +def test_config_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_config_grpc_lro_client(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_config_grpc_lro_async_client(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_deployment_path(): + project = "squid" + location = "clam" + deployment = "whelk" + expected = ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, + ) + ) + actual = ConfigClient.deployment_path(project, location, deployment) + assert expected == actual + + +def test_parse_deployment_path(): + expected = { + "project": "octopus", + "location": "oyster", + "deployment": "nudibranch", + } + path = ConfigClient.deployment_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_deployment_path(path) + assert expected == actual + + +def test_resource_path(): + project = "cuttlefish" + location = "mussel" + deployment = "winkle" + revision = "nautilus" + resource = "scallop" + expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}".format( + project=project, + location=location, + deployment=deployment, + revision=revision, + resource=resource, + ) + actual = ConfigClient.resource_path( + project, location, deployment, revision, resource + ) + assert expected == actual + + +def test_parse_resource_path(): + expected = { + "project": "abalone", + "location": "squid", + "deployment": "clam", + "revision": "whelk", + "resource": "octopus", + } + path = ConfigClient.resource_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_resource_path(path) + assert expected == actual + + +def test_revision_path(): + project = "oyster" + location = "nudibranch" + deployment = "cuttlefish" + revision = "mussel" + expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}".format( + project=project, + location=location, + deployment=deployment, + revision=revision, + ) + actual = ConfigClient.revision_path(project, location, deployment, revision) + assert expected == actual + + +def test_parse_revision_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "deployment": "scallop", + "revision": "abalone", + } + path = ConfigClient.revision_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_revision_path(path) + assert expected == actual + + +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) + actual = ConfigClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = ConfigClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_service_account_path(path) + assert expected == actual + + +def test_worker_pool_path(): + project = "oyster" + location = "nudibranch" + worker_pool = "cuttlefish" + expected = ( + "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( + project=project, + location=location, + worker_pool=worker_pool, + ) + ) + actual = ConfigClient.worker_pool_path(project, location, worker_pool) + assert expected == actual + + +def test_parse_worker_pool_path(): + expected = { + "project": "mussel", + "location": "winkle", + "worker_pool": "nautilus", + } + path = ConfigClient.worker_pool_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_worker_pool_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConfigClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = ConfigClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ConfigClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = ConfigClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ConfigClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = ConfigClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = ConfigClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = ConfigClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ConfigClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = ConfigClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConfigTransport, "_prep_wrapped_messages" + ) as prep: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConfigTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConfigClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": 
"projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/deployments/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/deployments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/deployments/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/deployments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/deployments/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = { + "resource": "projects/sample1/locations/sample2/deployments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = ConfigClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = ConfigAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConfigClient, transports.ConfigGrpcTransport), + (ConfigAsyncClient, transports.ConfigGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-contact-center-insights/.flake8 b/packages/google-cloud-contact-center-insights/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-contact-center-insights/.flake8 +++ b/packages/google-cloud-contact-center-insights/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-contact-center-insights/CHANGELOG.md b/packages/google-cloud-contact-center-insights/CHANGELOG.md index 320ec7f90387..434e3422dc06 100644 --- a/packages/google-cloud-contact-center-insights/CHANGELOG.md +++ b/packages/google-cloud-contact-center-insights/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.12.0...google-cloud-contact-center-insights-v1.12.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.11.1...google-cloud-contact-center-insights-v1.12.0) (2023-07-06) diff --git a/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst b/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst index 0bef99192b45..aa972d9de3fb 100644 --- a/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst +++ b/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-contact-center-insights/noxfile.py ********** diff --git a/packages/google-cloud-contact-center-insights/MANIFEST.in b/packages/google-cloud-contact-center-insights/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-contact-center-insights/MANIFEST.in +++ b/packages/google-cloud-contact-center-insights/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-contact-center-insights/README.rst b/packages/google-cloud-contact-center-insights/README.rst index 6a10caf6d4e1..edf3255ee52a 100644 --- a/packages/google-cloud-contact-center-insights/README.rst +++ b/packages/google-cloud-contact-center-insights/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. 
_`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-contact-center-insights + pip install google-cloud-contact-center-insights Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-contact-center-insights + py -m venv + .\\Scripts\activate + pip install google-cloud-contact-center-insights Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-contact-center-insights/docs/conf.py b/packages/google-cloud-contact-center-insights/docs/conf.py index 06cfceec4287..3873f233cd7d 100644 --- a/packages/google-cloud-contact-center-insights/docs/conf.py +++ b/packages/google-cloud-contact-center-insights/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py index 53c8d2a9f5b9..4cb3e4748b15 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py @@ -44,7 +44,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -1190,6 +1190,7 @@ async def sample_list_analyses(): Returns: google.cloud.contact_center_insights_v1.services.contact_center_insights.pagers.ListAnalysesAsyncPager: The response to list analyses. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4166,6 +4167,7 @@ async def sample_list_views(): Returns: google.cloud.contact_center_insights_v1.services.contact_center_insights.pagers.ListViewsAsyncPager: The response of listing views. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index 70d7cf00079a..a5dafefc6ed7 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -48,7 +48,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -1568,6 +1568,7 @@ def sample_list_analyses(): Returns: google.cloud.contact_center_insights_v1.services.contact_center_insights.pagers.ListAnalysesPager: The response to list analyses. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4552,6 +4553,7 @@ def sample_list_views(): Returns: google.cloud.contact_center_insights_v1.services.contact_center_insights.pagers.ListViewsPager: The response of listing views. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py index a44b3ad0e698..4153fd2f4fd3 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py @@ -32,7 +32,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py index 7def96beb3cf..e5953d126498 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py @@ -1003,6 +1003,7 @@ class ExportInsightsDataRequest(proto.Message): kms_key (str): A fully qualified KMS key name for BigQuery tables protected by CMEK. 
Format: + projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}/cryptoKeyVersions/{version} write_disposition (google.cloud.contact_center_insights_v1.types.ExportInsightsDataRequest.WriteDisposition): Options for what to do if the destination diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py index 1d67725d073c..39b9ceb9fd7d 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py @@ -101,6 +101,7 @@ class Conversation(proto.Message): name (str): Immutable. The resource name of the conversation. Format: + projects/{project}/locations/{location}/conversations/{conversation} data_source (google.cloud.contact_center_insights_v1.types.ConversationDataSource): The source of the audio and transcription for @@ -149,6 +150,7 @@ class Conversation(proto.Message): Output only. All the matched Dialogflow intents in the call. The key corresponds to a Dialogflow intent, format: + projects/{project}/agent/{agent}/intents/{intent} obfuscated_user_id (str): Obfuscated user ID which the customer sent to @@ -451,6 +453,7 @@ class Analysis(proto.Message): name (str): Immutable. The resource name of the analysis. Format: + projects/{project}/locations/{location}/conversations/{conversation}/analyses/{analysis} request_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which the analysis @@ -562,6 +565,7 @@ class DialogflowSource(proto.Message): Output only. The name of the Dialogflow conversation that this conversation resource is derived from. 
Format: + projects/{project}/locations/{location}/conversations/{conversation} audio_uri (str): Cloud Storage URI that points to a file that @@ -977,6 +981,7 @@ class Type(proto.Enum): - ``day`` - Two digit day number, if detected. NUMBER (12): Number. + The metadata is the number itself. PRICE (13): Price. @@ -1683,6 +1688,7 @@ class Settings(proto.Message): name (str): Immutable. The resource name of the settings resource. Format: + projects/{project}/locations/{location}/settings create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which the settings @@ -2340,6 +2346,7 @@ class View(proto.Message): name (str): Immutable. The resource name of the view. Format: + projects/{project}/locations/{location}/views/{view} display_name (str): The human-readable display name of the view. diff --git a/packages/google-cloud-contact-center-insights/noxfile.py b/packages/google-cloud-contact-center-insights/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-contact-center-insights/noxfile.py +++ b/packages/google-cloud-contact-center-insights/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index c3e9d3df64ac..d94d393ac183 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contact-center-insights", - "version": "0.1.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-contact-center-insights/scripts/decrypt-secrets.sh b/packages/google-cloud-contact-center-insights/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-contact-center-insights/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-contact-center-insights/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index d4da249a3b12..da906d57ce8f 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -41,7 +41,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/.flake8 b/packages/google-cloud-contentwarehouse/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-contentwarehouse/.flake8 +++ b/packages/google-cloud-contentwarehouse/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-contentwarehouse/CHANGELOG.md b/packages/google-cloud-contentwarehouse/CHANGELOG.md index 475cfdf1e778..d8a4e00771ef 100644 --- a/packages/google-cloud-contentwarehouse/CHANGELOG.md +++ b/packages/google-cloud-contentwarehouse/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.7.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.7.0...google-cloud-contentwarehouse-v0.7.1) (2023-09-19) + + +### Features + +* add `cloud_function`fields to support new ingestion option in Document Warehouse API v1 ([61a2c29](https://github.com/googleapis/google-cloud-python/commit/61a2c2902dedd1668534310220e0e53a36718d2d)) +* add `ROOT_FOLDER` field to Document Warehouse API v1 ([61a2c29](https://github.com/googleapis/google-cloud-python/commit/61a2c2902dedd1668534310220e0e53a36718d2d)) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [0.7.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.6.0...google-cloud-contentwarehouse-v0.7.0) (2023-08-16) + + +### Features + +* add Pipeline Service to Document Warehouse API v1 ([#11583](https://github.com/googleapis/google-cloud-python/issues/11583)) ([974cf2e](https://github.com/googleapis/google-cloud-python/commit/974cf2ec443fb1290b86e8978e8ad88dc5700278)) + +## [0.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.5.1...google-cloud-contentwarehouse-v0.6.0) (2023-08-11) + + +### Features + +* Add `disposition_time`, `legal_hold`, `document_is_retention_folder`, `document_is_legal_hold_folder`, `question_answer`, `document_name_filter` fields ([d0432c7](https://github.com/googleapis/google-cloud-python/commit/d0432c7b7754f816c8d3d95f1ea6f9349b930717)) +* Add `ON_CREATE_LINK`, `ON_DELETE_LINK` fields to support new rule engine triggers 
([d0432c7](https://github.com/googleapis/google-cloud-python/commit/d0432c7b7754f816c8d3d95f1ea6f9349b930717)) + + +### Documentation + +* Deprecate DB_CLOUD_SQL_POSTGRES ([d0432c7](https://github.com/googleapis/google-cloud-python/commit/d0432c7b7754f816c8d3d95f1ea6f9349b930717)) + ## [0.5.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.5.0...google-cloud-contentwarehouse-v0.5.1) (2023-07-05) diff --git a/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst b/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst index 8e1024a87af6..ddde08b340d6 100644 --- a/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst +++ b/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-contentwarehouse/noxfile.py ********** diff --git a/packages/google-cloud-contentwarehouse/MANIFEST.in b/packages/google-cloud-contentwarehouse/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-contentwarehouse/MANIFEST.in +++ b/packages/google-cloud-contentwarehouse/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-contentwarehouse/README.rst b/packages/google-cloud-contentwarehouse/README.rst index b33a61166cf8..f2d1701503fc 100644 --- a/packages/google-cloud-contentwarehouse/README.rst +++ b/packages/google-cloud-contentwarehouse/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. 
`virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-contentwarehouse + pip install google-cloud-contentwarehouse Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-contentwarehouse + py -m venv + .\\Scripts\activate + pip install google-cloud-contentwarehouse Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-contentwarehouse/docs/conf.py b/packages/google-cloud-contentwarehouse/docs/conf.py index 63880af5d0cc..f0ed6c3f14b1 100644 --- a/packages/google-cloud-contentwarehouse/docs/conf.py +++ b/packages/google-cloud-contentwarehouse/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/pipeline_service.rst b/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/pipeline_service.rst new file mode 100644 index 000000000000..6c62f7efa279 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/pipeline_service.rst @@ -0,0 +1,6 @@ +PipelineService +--------------------------------- + +.. 
automodule:: google.cloud.contentwarehouse_v1.services.pipeline_service + :members: + :inherited-members: diff --git a/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/services.rst b/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/services.rst index 99352d9243ea..3c2654988c59 100644 --- a/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/services.rst +++ b/packages/google-cloud-contentwarehouse/docs/contentwarehouse_v1/services.rst @@ -6,5 +6,6 @@ Services for Google Cloud Contentwarehouse v1 API document_link_service document_schema_service document_service + pipeline_service rule_set_service synonym_set_service diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py index 83ac8ff2b6bf..cd88f21f9c56 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/__init__.py @@ -36,6 +36,12 @@ from google.cloud.contentwarehouse_v1.services.document_service.client import ( DocumentServiceClient, ) +from google.cloud.contentwarehouse_v1.services.pipeline_service.async_client import ( + PipelineServiceAsyncClient, +) +from google.cloud.contentwarehouse_v1.services.pipeline_service.client import ( + PipelineServiceClient, +) from google.cloud.contentwarehouse_v1.services.rule_set_service.async_client import ( RuleSetServiceAsyncClient, ) @@ -142,6 +148,17 @@ HistogramQueryPropertyNameFilter, HistogramQueryResult, ) +from google.cloud.contentwarehouse_v1.types.pipeline_service import RunPipelineRequest +from google.cloud.contentwarehouse_v1.types.pipelines import ( + ExportToCdwPipeline, + GcsIngestPipeline, + GcsIngestWithDocAiProcessorsPipeline, + IngestPipelineConfig, + ProcessorInfo, + ProcessWithDocAiPipeline, + RunPipelineMetadata, + RunPipelineResponse, +) from 
google.cloud.contentwarehouse_v1.types.rule_engine import ( AccessControlAction, Action, @@ -185,6 +202,8 @@ "DocumentSchemaServiceAsyncClient", "DocumentServiceClient", "DocumentServiceAsyncClient", + "PipelineServiceClient", + "PipelineServiceAsyncClient", "RuleSetServiceClient", "RuleSetServiceAsyncClient", "SynonymSetServiceClient", @@ -263,6 +282,15 @@ "HistogramQuery", "HistogramQueryPropertyNameFilter", "HistogramQueryResult", + "RunPipelineRequest", + "ExportToCdwPipeline", + "GcsIngestPipeline", + "GcsIngestWithDocAiProcessorsPipeline", + "IngestPipelineConfig", + "ProcessorInfo", + "ProcessWithDocAiPipeline", + "RunPipelineMetadata", + "RunPipelineResponse", "AccessControlAction", "Action", "ActionExecutorOutput", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index 360a0d13ebdd..1a229f7edfcc 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py index 267d041782e0..21eb908e8b89 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/__init__.py @@ -27,6 +27,7 @@ DocumentSchemaServiceClient, ) from .services.document_service import DocumentServiceAsyncClient, DocumentServiceClient +from .services.pipeline_service import PipelineServiceAsyncClient, PipelineServiceClient from .services.rule_set_service import RuleSetServiceAsyncClient, RuleSetServiceClient from .services.synonym_set_service import ( SynonymSetServiceAsyncClient, @@ -126,6 +127,17 @@ HistogramQueryPropertyNameFilter, HistogramQueryResult, ) +from .types.pipeline_service import RunPipelineRequest +from .types.pipelines import ( + ExportToCdwPipeline, + GcsIngestPipeline, + GcsIngestWithDocAiProcessorsPipeline, + IngestPipelineConfig, + ProcessorInfo, + ProcessWithDocAiPipeline, + RunPipelineMetadata, + RunPipelineResponse, +) from .types.rule_engine import ( AccessControlAction, Action, @@ -166,6 +178,7 @@ "DocumentLinkServiceAsyncClient", "DocumentSchemaServiceAsyncClient", "DocumentServiceAsyncClient", + "PipelineServiceAsyncClient", "RuleSetServiceAsyncClient", "SynonymSetServiceAsyncClient", "AccessControlAction", @@ -207,11 +220,14 @@ "EnumArray", "EnumTypeOptions", "EnumValue", + "ExportToCdwPipeline", "FetchAclRequest", "FetchAclResponse", "FileTypeFilter", "FloatArray", "FloatTypeOptions", + "GcsIngestPipeline", + "GcsIngestWithDocAiProcessorsPipeline", "GetDocumentRequest", "GetDocumentSchemaRequest", "GetRuleSetRequest", @@ -219,6 +235,7 @@ "HistogramQuery", "HistogramQueryPropertyNameFilter", "HistogramQueryResult", + 
"IngestPipelineConfig", "IntegerArray", "IntegerTypeOptions", "InvalidRule", @@ -236,6 +253,9 @@ "MapProperty", "MapTypeOptions", "MergeFieldsOptions", + "PipelineServiceClient", + "ProcessWithDocAiPipeline", + "ProcessorInfo", "Property", "PropertyArray", "PropertyDefinition", @@ -253,6 +273,9 @@ "RuleEvaluatorOutput", "RuleSet", "RuleSetServiceClient", + "RunPipelineMetadata", + "RunPipelineRequest", + "RunPipelineResponse", "SearchDocumentsRequest", "SearchDocumentsResponse", "SetAclRequest", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json index 6420c7498436..755e453cc255 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_metadata.json @@ -317,6 +317,40 @@ } } }, + "PipelineService": { + "clients": { + "grpc": { + "libraryClient": "PipelineServiceClient", + "rpcs": { + "RunPipeline": { + "methods": [ + "run_pipeline" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PipelineServiceAsyncClient", + "rpcs": { + "RunPipeline": { + "methods": [ + "run_pipeline" + ] + } + } + }, + "rest": { + "libraryClient": "PipelineServiceClient", + "rpcs": { + "RunPipeline": { + "methods": [ + "run_pipeline" + ] + } + } + } + } + }, "RuleSetService": { "clients": { "grpc": { diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index 360a0d13ebdd..1a229f7edfcc 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations 
under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py index 5278a2c3e18e..e3b76bd8e42b 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py @@ -42,7 +42,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.document_link_service import pagers diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py index 36b8fec04ef4..a8b6413c270d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py @@ -46,7 +46,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.document_link_service import pagers diff --git 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py index 82e3ee853f23..8d63e4c6cf6f 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py @@ -22,7 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py index 41adff71048f..9aec99b0ed6c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py @@ -20,7 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py index 5ef2a604f1bd..faacd502f91c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py @@ -19,7 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py index 14487451f16c..49f1c1391b6c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py @@ -26,7 +26,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version @@ -37,6 +36,7 @@ 
OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.cloud.contentwarehouse_v1.types import document_link_service diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py index 435c2e5e780f..6923ede0ce7d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py @@ -42,7 +42,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.document_schema_service import pagers diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py index 65ef3ac93a7d..9d3758df0c42 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py @@ -46,7 +46,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from 
google.cloud.contentwarehouse_v1.services.document_schema_service import pagers diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py index 794fc766c3bb..4701be757823 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py @@ -22,7 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py index 30dd74b2e4fe..7343243a381a 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py @@ -20,7 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py index 72e8be0e69d6..f24d999d19fa 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py @@ -19,7 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py index 5d5a9748f49a..1db009f2e9d0 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py @@ -26,7 +26,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version @@ 
-37,6 +36,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.cloud.contentwarehouse_v1.types import ( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py index 554a23da31c5..92eb7793f32d 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py @@ -22,7 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py index 0b2644b1a532..2ecbcb9eaacc 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py @@ -20,7 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore 
from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py index e7d4fe3d731c..e282ce8c17f2 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py @@ -19,7 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py index e938bfbf4c93..9e1276a3dc10 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py @@ -26,7 +26,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import 
__version__ as requests_version @@ -37,6 +36,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.cloud.contentwarehouse_v1.types import ( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/__init__.py new file mode 100644 index 000000000000..7e84332ed6dc --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PipelineServiceAsyncClient +from .client import PipelineServiceClient + +__all__ = ( + "PipelineServiceClient", + "PipelineServiceAsyncClient", +) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py new file mode 100644 index 000000000000..55a9432a6aba --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py @@ -0,0 +1,418 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.contentwarehouse_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import pipeline_service, pipelines + +from .client import PipelineServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PipelineServiceTransport +from .transports.grpc_asyncio import PipelineServiceGrpcAsyncIOTransport + + +class PipelineServiceAsyncClient: + """This service lets you manage pipelines.""" + + _client: PipelineServiceClient + + DEFAULT_ENDPOINT = PipelineServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PipelineServiceClient.DEFAULT_MTLS_ENDPOINT + + cloud_function_path = staticmethod(PipelineServiceClient.cloud_function_path) + parse_cloud_function_path = staticmethod( + PipelineServiceClient.parse_cloud_function_path + ) + location_path = staticmethod(PipelineServiceClient.location_path) + parse_location_path = staticmethod(PipelineServiceClient.parse_location_path) + common_billing_account_path = staticmethod( + PipelineServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + 
PipelineServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PipelineServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PipelineServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PipelineServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PipelineServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PipelineServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PipelineServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PipelineServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PipelineServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PipelineServiceAsyncClient: The constructed client. + """ + return PipelineServiceClient.from_service_account_info.__func__(PipelineServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PipelineServiceAsyncClient: The constructed client. 
+ """ + return PipelineServiceClient.from_service_account_file.__func__(PipelineServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PipelineServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PipelineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PipelineServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(PipelineServiceClient).get_transport_class, type(PipelineServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PipelineServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the pipeline service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PipelineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PipelineServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def run_pipeline( + self, + request: Optional[Union[pipeline_service.RunPipelineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Run a predefined pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contentwarehouse_v1 + + async def sample_run_pipeline(): + # Create a client + client = contentwarehouse_v1.PipelineServiceAsyncClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.RunPipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.run_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.contentwarehouse_v1.types.RunPipelineRequest, dict]]): + The request object. Request message for + DocumentService.RunPipeline. + name (:class:`str`): + Required. The resource name which owns the resources of + the pipeline. Format: + projects/{project_number}/locations/{location}. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contentwarehouse_v1.types.RunPipelineResponse` + Response message of RunPipeline method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pipeline_service.RunPipelineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_pipeline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + pipelines.RunPipelineResponse, + metadata_type=pipelines.RunPipelineMetadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PipelineServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PipelineServiceAsyncClient",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py new file mode 100644 index 000000000000..ac1f760b8ce1 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py @@ -0,0 +1,657 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.contentwarehouse_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import pipeline_service, pipelines + +from .transports.base import DEFAULT_CLIENT_INFO, PipelineServiceTransport +from .transports.grpc import PipelineServiceGrpcTransport +from .transports.grpc_asyncio import PipelineServiceGrpcAsyncIOTransport +from .transports.rest import PipelineServiceRestTransport + + +class PipelineServiceClientMeta(type): + """Metaclass for the PipelineService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PipelineServiceTransport]] + _transport_registry["grpc"] = PipelineServiceGrpcTransport + _transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport + _transport_registry["rest"] = PipelineServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PipelineServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PipelineServiceClient(metaclass=PipelineServiceClientMeta): + """This service lets you manage pipelines.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PipelineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PipelineServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PipelineServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PipelineServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def cloud_function_path( + project: str, + location: str, + function: str, + ) -> str: + """Returns a fully-qualified cloud_function string.""" + return "projects/{project}/locations/{location}/functions/{function}".format( + project=project, + location=location, + function=function, + ) + + @staticmethod + def parse_cloud_function_path(path: str) -> Dict[str, str]: + """Parses a cloud_function path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/functions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_location_path(path: str) -> Dict[str, str]: + """Parses a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PipelineServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the pipeline service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PipelineServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PipelineServiceTransport): + # transport is a PipelineServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def run_pipeline( + self, + request: Optional[Union[pipeline_service.RunPipelineRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Run a predefined pipeline. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import contentwarehouse_v1 + + def sample_run_pipeline(): + # Create a client + client = contentwarehouse_v1.PipelineServiceClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.RunPipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.run_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.contentwarehouse_v1.types.RunPipelineRequest, dict]): + The request object. Request message for + DocumentService.RunPipeline. + name (str): + Required. The resource name which owns the resources of + the pipeline. Format: + projects/{project_number}/locations/{location}. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.contentwarehouse_v1.types.RunPipelineResponse` + Response message of RunPipeline method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a pipeline_service.RunPipelineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pipeline_service.RunPipelineRequest): + request = pipeline_service.RunPipelineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_pipeline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + pipelines.RunPipelineResponse, + metadata_type=pipelines.RunPipelineMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PipelineServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PipelineServiceClient",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/__init__.py new file mode 100644 index 000000000000..c01b35313856 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PipelineServiceTransport +from .grpc import PipelineServiceGrpcTransport +from .grpc_asyncio import PipelineServiceGrpcAsyncIOTransport +from .rest import PipelineServiceRestInterceptor, PipelineServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[PipelineServiceTransport]] +_transport_registry["grpc"] = PipelineServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PipelineServiceRestTransport + +__all__ = ( + "PipelineServiceTransport", + "PipelineServiceGrpcTransport", + "PipelineServiceGrpcAsyncIOTransport", + "PipelineServiceRestTransport", + "PipelineServiceRestInterceptor", +) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/base.py new file mode 100644 index 000000000000..047e3f1168cb --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/base.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.contentwarehouse_v1 import gapic_version as package_version +from google.cloud.contentwarehouse_v1.types import pipeline_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PipelineServiceTransport(abc.ABC): + """Abstract transport class for PipelineService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "contentwarehouse.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.run_pipeline: gapic_v1.method.wrap_method( + self.run_pipeline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=120.0, + ), + default_timeout=120.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def run_pipeline( + self, + ) -> Callable[ + [pipeline_service.RunPipelineRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PipelineServiceTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc.py new file mode 100644 index 000000000000..55cdefdaac1d --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.contentwarehouse_v1.types import pipeline_service + +from .base import DEFAULT_CLIENT_INFO, PipelineServiceTransport + + +class PipelineServiceGrpcTransport(PipelineServiceTransport): + """gRPC backend transport for PipelineService. + + This service lets you manage pipelines. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def run_pipeline( + self, + ) -> Callable[[pipeline_service.RunPipelineRequest], operations_pb2.Operation]: + r"""Return a callable for the run pipeline method over gRPC. + + Run a predefined pipeline. + + Returns: + Callable[[~.RunPipelineRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "run_pipeline" not in self._stubs: + self._stubs["run_pipeline"] = self.grpc_channel.unary_unary( + "/google.cloud.contentwarehouse.v1.PipelineService/RunPipeline", + request_serializer=pipeline_service.RunPipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["run_pipeline"] + + def close(self): + self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PipelineServiceGrpcTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f309315b1568 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc_asyncio.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.contentwarehouse_v1.types import pipeline_service + +from .base import DEFAULT_CLIENT_INFO, PipelineServiceTransport +from .grpc import PipelineServiceGrpcTransport + + +class PipelineServiceGrpcAsyncIOTransport(PipelineServiceTransport): + """gRPC AsyncIO backend transport for PipelineService. + + This service lets you manage pipelines. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def run_pipeline( + self, + ) -> Callable[ + [pipeline_service.RunPipelineRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the run pipeline method over gRPC. + + Run a predefined pipeline. + + Returns: + Callable[[~.RunPipelineRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_pipeline" not in self._stubs: + self._stubs["run_pipeline"] = self.grpc_channel.unary_unary( + "/google.cloud.contentwarehouse.v1.PipelineService/RunPipeline", + request_serializer=pipeline_service.RunPipelineRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["run_pipeline"] + + def close(self): + return self.grpc_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + +__all__ = ("PipelineServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py new file mode 100644 index 000000000000..862059068e11 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.contentwarehouse_v1.types import pipeline_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PipelineServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PipelineServiceRestInterceptor: + """Interceptor for PipelineService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PipelineServiceRestTransport. + + .. 
code-block:: python + class MyCustomPipelineServiceInterceptor(PipelineServiceRestInterceptor): + def pre_run_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PipelineServiceRestTransport(interceptor=MyCustomPipelineServiceInterceptor()) + client = PipelineServiceClient(transport=transport) + + + """ + + def pre_run_pipeline( + self, + request: pipeline_service.RunPipelineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pipeline_service.RunPipelineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the PipelineService server. + """ + return request, metadata + + def post_run_pipeline( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for run_pipeline + + Override in a subclass to manipulate the response + after it is returned by the PipelineService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the PipelineService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the PipelineService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class PipelineServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PipelineServiceRestInterceptor + + +class PipelineServiceRestTransport(PipelineServiceTransport): + """REST backend transport for PipelineService. + + This service lets you manage pipelines. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "contentwarehouse.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PipelineServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PipelineServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _RunPipeline(PipelineServiceRestStub): + def __hash__(self): + return hash("RunPipeline") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pipeline_service.RunPipelineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the run pipeline method over HTTP. + + Args: + request (~.pipeline_service.RunPipelineRequest): + The request object. Request message for + DocumentService.RunPipeline. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*}:runPipeline", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_pipeline(request, metadata) + pb_request = pipeline_service.RunPipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_pipeline(resp) + return resp + + @property + def run_pipeline( + self, + ) -> Callable[[pipeline_service.RunPipelineRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunPipeline(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(PipelineServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PipelineServiceRestTransport",) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py index d36a9a9bedaa..e0e4a85980f8 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py @@ -42,7 +42,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.rule_set_service import pagers from google.cloud.contentwarehouse_v1.types import rule_engine, ruleset_service_request diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py index c1522389dca9..50e40ff15361 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py @@ -46,7 +46,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from 
google.longrunning import operations_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.rule_set_service import pagers from google.cloud.contentwarehouse_v1.types import rule_engine, ruleset_service_request diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py index 9de74ca66f3b..55d00a851fc5 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py @@ -22,7 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py index e5c950f0741e..664cc40d0326 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py @@ -20,7 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf 
import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py index f7b739f1b899..07c6df17c914 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py @@ -19,7 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py index 5455d3cef287..1be1d06fcb92 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py @@ -26,7 +26,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as 
requests_version @@ -37,6 +36,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.cloud.contentwarehouse_v1.types import rule_engine, ruleset_service_request diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py index 3dd4c3a86351..e59a7124b1b6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py @@ -42,7 +42,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.synonym_set_service import pagers from google.cloud.contentwarehouse_v1.types import ( @@ -293,8 +293,9 @@ async def sample_create_synonym_set(): google.cloud.contentwarehouse_v1.types.SynonymSet: Represents a list of synonyms for a given context. For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport @@ -407,8 +408,9 @@ async def sample_get_synonym_set(): google.cloud.contentwarehouse_v1.types.SynonymSet: Represents a list of synonyms for a given context. 
For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport @@ -539,8 +541,9 @@ async def sample_update_synonym_set(): google.cloud.contentwarehouse_v1.types.SynonymSet: Represents a list of synonyms for a given context. For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py index 038c6cc65924..4c2f79cba115 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py @@ -46,7 +46,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.contentwarehouse_v1.services.synonym_set_service import pagers from google.cloud.contentwarehouse_v1.types import ( @@ -535,8 +535,9 @@ def sample_create_synonym_set(): google.cloud.contentwarehouse_v1.types.SynonymSet: Represents a list of synonyms for a given context. 
For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport @@ -649,8 +650,9 @@ def sample_get_synonym_set(): google.cloud.contentwarehouse_v1.types.SynonymSet: Represents a list of synonyms for a given context. For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport @@ -772,8 +774,9 @@ def sample_update_synonym_set(): google.cloud.contentwarehouse_v1.types.SynonymSet: Represents a list of synonyms for a given context. For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py index 80a63be19a5f..3767e483e219 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py @@ -22,7 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py index 2034667c182a..cb3092a74034 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py @@ -20,7 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py index cac1d717d737..d432fc2c7ae1 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py @@ -19,7 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git 
a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py index 7fde208604c2..6f57deafcf74 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py @@ -26,7 +26,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version @@ -37,6 +36,7 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.cloud.contentwarehouse_v1.types import ( @@ -383,8 +383,9 @@ def __call__( ~.synonymset.SynonymSet: Represents a list of synonyms for a given context. For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport @@ -567,8 +568,9 @@ def __call__( ~.synonymset.SynonymSet: Represents a list of synonyms for a given context. For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport @@ -758,8 +760,9 @@ def __call__( ~.synonymset.SynonymSet: Represents a list of synonyms for a given context. 
For example a context - "sales" could contain: Synonym 1: sale, - invoice, bill, order + "sales" could contain: + + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py index e9a9eb7bb511..01fb52e5794a 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/__init__.py @@ -107,6 +107,17 @@ HistogramQueryPropertyNameFilter, HistogramQueryResult, ) +from .pipeline_service import RunPipelineRequest +from .pipelines import ( + ExportToCdwPipeline, + GcsIngestPipeline, + GcsIngestWithDocAiProcessorsPipeline, + IngestPipelineConfig, + ProcessorInfo, + ProcessWithDocAiPipeline, + RunPipelineMetadata, + RunPipelineResponse, +) from .rule_engine import ( AccessControlAction, Action, @@ -218,6 +229,15 @@ "HistogramQuery", "HistogramQueryPropertyNameFilter", "HistogramQueryResult", + "RunPipelineRequest", + "ExportToCdwPipeline", + "GcsIngestPipeline", + "GcsIngestWithDocAiProcessorsPipeline", + "IngestPipelineConfig", + "ProcessorInfo", + "ProcessWithDocAiPipeline", + "RunPipelineMetadata", + "RunPipelineResponse", "AccessControlAction", "Action", "ActionExecutorOutput", diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py index 4759bd3cfb72..7f4f054f8ee9 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document.py @@ -180,6 +180,13 @@ class Document(proto.Message): The user who creates the document. 
updater (str): The user who lastly updates the document. + disposition_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If linked to a Collection with + RetentionPolicy, the date when the document + becomes mutable. + legal_hold (bool): + Output only. Indicates if the document has a + legal hold on it. """ name: str = proto.Field( @@ -276,6 +283,15 @@ class Document(proto.Message): proto.STRING, number=14, ) + disposition_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=22, + message=timestamp_pb2.Timestamp, + ) + legal_hold: bool = proto.Field( + proto.BOOL, + number=23, + ) class DocumentReference(proto.Message): @@ -304,6 +320,10 @@ class DocumentReference(proto.Message): delete_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the document is deleted. + document_is_retention_folder (bool): + Document is a folder with retention policy. + document_is_legal_hold_folder (bool): + Document is a folder with legal hold. """ document_name: str = proto.Field( @@ -337,6 +357,14 @@ class DocumentReference(proto.Message): number=7, message=timestamp_pb2.Timestamp, ) + document_is_retention_folder: bool = proto.Field( + proto.BOOL, + number=8, + ) + document_is_legal_hold_folder: bool = proto.Field( + proto.BOOL, + number=9, + ) class Property(proto.Message): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py index f2cc3e38f993..ca260a8fafb3 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_link_service.py @@ -127,6 +127,7 @@ class ListLinkedSourcesRequest(proto.Message): The maximum number of document-links to return. The service may return fewer than this value. 
+ If unspecified, at most 50 document-links will be returned. The maximum value is 1000; values above 1000 will be coerced to 1000. diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py index 31285f9e9462..c1cd265e68d3 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py @@ -189,6 +189,10 @@ class SearchDocumentsResponse(proto.Message): histogram_query_results (MutableSequence[google.cloud.contentwarehouse_v1.types.HistogramQueryResult]): The histogram results that match with the specified [SearchDocumentsRequest.histogram_queries][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.histogram_queries]. + question_answer (str): + Experimental. + Question answer from the query against the + document. """ class MatchingDocument(proto.Message): @@ -262,6 +266,10 @@ def raw_page(self): number=6, message=histogram.HistogramQueryResult, ) + question_answer: str = proto.Field( + proto.STRING, + number=7, + ) class FetchAclResponse(proto.Message): diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py index 05f6adb52c08..2922f160cfc8 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/filters.py @@ -150,6 +150,9 @@ class DocumentQuery(proto.Message): Search all the documents under this specified folder. Format: projects/{project_number}/locations/{location}/documents/{document_id}. + document_name_filter (MutableSequence[str]): + Search the documents in the list. 
Format: + projects/{project_number}/locations/{location}/documents/{document_id}. query_context (MutableSequence[str]): For custom synonyms. Customers provide the synonyms based on context. @@ -214,6 +217,10 @@ class DocumentQuery(proto.Message): proto.STRING, number=9, ) + document_name_filter: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) query_context: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=10, @@ -251,10 +258,13 @@ class TimeField(proto.Enum): Earliest document create time. UPDATE_TIME (2): Latest document update time. + DISPOSITION_TIME (3): + Time when document becomes mutable again. """ TIME_FIELD_UNSPECIFIED = 0 CREATE_TIME = 1 UPDATE_TIME = 2 + DISPOSITION_TIME = 3 time_range: interval_pb2.Interval = proto.Field( proto.MESSAGE, @@ -357,11 +367,14 @@ class FileType(proto.Enum): Returns only folders. DOCUMENT (3): Returns only non-folder documents. + ROOT_FOLDER (4): + Returns only root folders """ FILE_TYPE_UNSPECIFIED = 0 ALL = 1 FOLDER = 2 DOCUMENT = 3 + ROOT_FOLDER = 4 file_type: FileType = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py index 0635422b2c8e..d30c2673810e 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/histogram.py @@ -44,6 +44,7 @@ class HistogramQuery(proto.Message): Controls if the histogram query requires the return of a precise count. Enable this flag may adversely impact performance. + Defaults to true. filters (google.cloud.contentwarehouse_v1.types.HistogramQueryPropertyNameFilter): Optional. 
Filter the result of histogram diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/pipeline_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/pipeline_service.py new file mode 100644 index 000000000000..b11b927f50c1 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/pipeline_service.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.contentwarehouse_v1.types import common, pipelines + +__protobuf__ = proto.module( + package="google.cloud.contentwarehouse.v1", + manifest={ + "RunPipelineRequest", + }, +) + + +class RunPipelineRequest(proto.Message): + r"""Request message for DocumentService.RunPipeline. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name which owns the resources of the + pipeline. Format: + projects/{project_number}/locations/{location}. 
+ gcs_ingest_pipeline (google.cloud.contentwarehouse_v1.types.GcsIngestPipeline): + Cloud Storage ingestion pipeline. + + This field is a member of `oneof`_ ``pipeline``. + gcs_ingest_with_doc_ai_processors_pipeline (google.cloud.contentwarehouse_v1.types.GcsIngestWithDocAiProcessorsPipeline): + Use DocAI processors to process documents in + Cloud Storage and ingest them to Document + Warehouse. + + This field is a member of `oneof`_ ``pipeline``. + export_cdw_pipeline (google.cloud.contentwarehouse_v1.types.ExportToCdwPipeline): + Export documents from Document Warehouse to + CDW for training purpose. + + This field is a member of `oneof`_ ``pipeline``. + process_with_doc_ai_pipeline (google.cloud.contentwarehouse_v1.types.ProcessWithDocAiPipeline): + Use a DocAI processor to process documents in + Document Warehouse, and re-ingest the updated + results into Document Warehouse. + + This field is a member of `oneof`_ ``pipeline``. + request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata): + The meta information collected about the end + user, used to enforce access control for the + service. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + gcs_ingest_pipeline: pipelines.GcsIngestPipeline = proto.Field( + proto.MESSAGE, + number=2, + oneof="pipeline", + message=pipelines.GcsIngestPipeline, + ) + gcs_ingest_with_doc_ai_processors_pipeline: pipelines.GcsIngestWithDocAiProcessorsPipeline = proto.Field( + proto.MESSAGE, + number=3, + oneof="pipeline", + message=pipelines.GcsIngestWithDocAiProcessorsPipeline, + ) + export_cdw_pipeline: pipelines.ExportToCdwPipeline = proto.Field( + proto.MESSAGE, + number=4, + oneof="pipeline", + message=pipelines.ExportToCdwPipeline, + ) + process_with_doc_ai_pipeline: pipelines.ProcessWithDocAiPipeline = proto.Field( + proto.MESSAGE, + number=5, + oneof="pipeline", + message=pipelines.ProcessWithDocAiPipeline, + ) + request_metadata: common.RequestMetadata = proto.Field( + proto.MESSAGE, + number=6, + message=common.RequestMetadata, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/pipelines.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/pipelines.py new file mode 100644 index 000000000000..ff98cfd91797 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/pipelines.py @@ -0,0 +1,528 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.iam.v1 import policy_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.contentwarehouse_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.contentwarehouse.v1", + manifest={ + "RunPipelineResponse", + "RunPipelineMetadata", + "ProcessorInfo", + "IngestPipelineConfig", + "GcsIngestPipeline", + "GcsIngestWithDocAiProcessorsPipeline", + "ExportToCdwPipeline", + "ProcessWithDocAiPipeline", + }, +) + + +class RunPipelineResponse(proto.Message): + r"""Response message of RunPipeline method.""" + + +class RunPipelineMetadata(proto.Message): + r"""Metadata message of RunPipeline method. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_file_count (int): + Number of files that were processed by the + pipeline. + failed_file_count (int): + Number of files that have failed at some + point in the pipeline. + user_info (google.cloud.contentwarehouse_v1.types.UserInfo): + User unique identification and groups + information. + gcs_ingest_pipeline_metadata (google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.GcsIngestPipelineMetadata): + The pipeline metadata for GcsIngest pipeline. + + This field is a member of `oneof`_ ``pipeline_metadata``. + export_to_cdw_pipeline_metadata (google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.ExportToCdwPipelineMetadata): + The pipeline metadata for Export-to-CDW + pipeline. + + This field is a member of `oneof`_ ``pipeline_metadata``. 
+ process_with_doc_ai_pipeline_metadata (google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.ProcessWithDocAiPipelineMetadata): + The pipeline metadata for Process-with-DocAi + pipeline. + + This field is a member of `oneof`_ ``pipeline_metadata``. + individual_document_statuses (MutableSequence[google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.IndividualDocumentStatus]): + The list of response details of each + document. + """ + + class GcsIngestPipelineMetadata(proto.Message): + r"""The metadata message for GcsIngest pipeline. + + Attributes: + input_path (str): + The input Cloud Storage folder in this pipeline. Format: + ``gs:///``. + """ + + input_path: str = proto.Field( + proto.STRING, + number=1, + ) + + class ExportToCdwPipelineMetadata(proto.Message): + r"""The metadata message for Export-to-CDW pipeline. + + Attributes: + documents (MutableSequence[str]): + The input list of all the resource names of + the documents to be exported. + doc_ai_dataset (str): + The output CDW dataset resource name. + output_path (str): + The output Cloud Storage folder in this + pipeline. + """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + doc_ai_dataset: str = proto.Field( + proto.STRING, + number=2, + ) + output_path: str = proto.Field( + proto.STRING, + number=3, + ) + + class ProcessWithDocAiPipelineMetadata(proto.Message): + r"""The metadata message for Process-with-DocAi pipeline. + + Attributes: + documents (MutableSequence[str]): + The input list of all the resource names of + the documents to be processed. + processor_info (google.cloud.contentwarehouse_v1.types.ProcessorInfo): + The DocAI processor to process the documents + with. 
+ """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + processor_info: "ProcessorInfo" = proto.Field( + proto.MESSAGE, + number=2, + message="ProcessorInfo", + ) + + class IndividualDocumentStatus(proto.Message): + r"""The status of processing a document. + + Attributes: + document_id (str): + Document identifier of an existing document. + status (google.rpc.status_pb2.Status): + The status processing the document. + """ + + document_id: str = proto.Field( + proto.STRING, + number=1, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + total_file_count: int = proto.Field( + proto.INT32, + number=1, + ) + failed_file_count: int = proto.Field( + proto.INT32, + number=2, + ) + user_info: common.UserInfo = proto.Field( + proto.MESSAGE, + number=3, + message=common.UserInfo, + ) + gcs_ingest_pipeline_metadata: GcsIngestPipelineMetadata = proto.Field( + proto.MESSAGE, + number=4, + oneof="pipeline_metadata", + message=GcsIngestPipelineMetadata, + ) + export_to_cdw_pipeline_metadata: ExportToCdwPipelineMetadata = proto.Field( + proto.MESSAGE, + number=6, + oneof="pipeline_metadata", + message=ExportToCdwPipelineMetadata, + ) + process_with_doc_ai_pipeline_metadata: ProcessWithDocAiPipelineMetadata = ( + proto.Field( + proto.MESSAGE, + number=7, + oneof="pipeline_metadata", + message=ProcessWithDocAiPipelineMetadata, + ) + ) + individual_document_statuses: MutableSequence[ + IndividualDocumentStatus + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=IndividualDocumentStatus, + ) + + +class ProcessorInfo(proto.Message): + r"""The DocAI processor information. + + Attributes: + processor_name (str): + The processor resource name. 
Format is + ``projects/{project}/locations/{location}/processors/{processor}``, + or + ``projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`` + document_type (str): + The processor will process the documents with + this document type. + schema_name (str): + The Document schema resource name. All documents processed + by this processor will use this schema. Format: + projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}. + """ + + processor_name: str = proto.Field( + proto.STRING, + number=1, + ) + document_type: str = proto.Field( + proto.STRING, + number=2, + ) + schema_name: str = proto.Field( + proto.STRING, + number=3, + ) + + +class IngestPipelineConfig(proto.Message): + r"""The ingestion pipeline config. + + Attributes: + document_acl_policy (google.iam.v1.policy_pb2.Policy): + The document level acl policy config. This refers to an + Identity and Access (IAM) policy, which specifies access + controls for all documents ingested by the pipeline. The + [role][google.iam.v1.Binding.role] and + [members][google.iam.v1.Binding.role] under the policy needs + to be specified. + + The following roles are supported for document level acl + control: + + - roles/contentwarehouse.documentAdmin + - roles/contentwarehouse.documentEditor + - roles/contentwarehouse.documentViewer + + The following members are supported for document level acl + control: + + - user:user-email@example.com + - group:group-email@example.com Note that for documents + searched with LLM, only single level user or group acl + check is supported. + enable_document_text_extraction (bool): + The document text extraction enabled flag. + If the flag is set to true, DWH will perform + text extraction on the raw document. + folder (str): + Optional. The name of the folder to which all ingested + documents will be linked during ingestion process. 
Format is + ``projects/{project}/locations/{location}/documents/{folder_id}`` + cloud_function (str): + The Cloud Function resource name. The Cloud Function needs + to live inside consumer project and is accessible to + Document AI Warehouse P4SA. Only Cloud Functions V2 is + supported. Cloud function execution should complete within 5 + minutes or this file ingestion may fail due to timeout. + Format: + ``https://{region}-{project_id}.cloudfunctions.net/{cloud_function}`` + The following keys are available the request json payload. + + - display_name + - properties + - plain_text + - reference_id + - document_schema_name + - raw_document_path + - raw_document_file_type + + The following keys from the cloud function json response + payload will be ingested to the Document AI Warehouse as + part of Document proto content and/or related information. + The original values will be overridden if any key is present + in the response. + + - display_name + - properties + - plain_text + - document_acl_policy + - folder + """ + + document_acl_policy: policy_pb2.Policy = proto.Field( + proto.MESSAGE, + number=1, + message=policy_pb2.Policy, + ) + enable_document_text_extraction: bool = proto.Field( + proto.BOOL, + number=2, + ) + folder: str = proto.Field( + proto.STRING, + number=3, + ) + cloud_function: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GcsIngestPipeline(proto.Message): + r"""The configuration of the Cloud Storage Ingestion pipeline. + + Attributes: + input_path (str): + The input Cloud Storage folder. All files under this folder + will be imported to Document Warehouse. Format: + ``gs:///``. + schema_name (str): + The Document Warehouse schema resource name. All documents + processed by this pipeline will use this schema. Format: + projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}. + processor_type (str): + The Doc AI processor type name. 
Only used + when the format of ingested files is Doc AI + Document proto format. + skip_ingested_documents (bool): + The flag whether to skip ingested documents. + If it is set to true, documents in Cloud Storage + contains key "status" with value + "status=ingested" in custom metadata will be + skipped to ingest. + pipeline_config (google.cloud.contentwarehouse_v1.types.IngestPipelineConfig): + Optional. The config for the Cloud Storage + Ingestion pipeline. It provides additional + customization options to run the pipeline and + can be skipped if it is not applicable. + """ + + input_path: str = proto.Field( + proto.STRING, + number=1, + ) + schema_name: str = proto.Field( + proto.STRING, + number=2, + ) + processor_type: str = proto.Field( + proto.STRING, + number=3, + ) + skip_ingested_documents: bool = proto.Field( + proto.BOOL, + number=4, + ) + pipeline_config: "IngestPipelineConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="IngestPipelineConfig", + ) + + +class GcsIngestWithDocAiProcessorsPipeline(proto.Message): + r"""The configuration of the Cloud Storage Ingestion with DocAI + Processors pipeline. + + Attributes: + input_path (str): + The input Cloud Storage folder. All files under this folder + will be imported to Document Warehouse. Format: + ``gs:///``. + split_classify_processor_info (google.cloud.contentwarehouse_v1.types.ProcessorInfo): + The split and classify processor information. + The split and classify result will be used to + find a matched extract processor. + extract_processor_infos (MutableSequence[google.cloud.contentwarehouse_v1.types.ProcessorInfo]): + The extract processors information. + One matched extract processor will be used to + process documents based on the classify + processor result. If no classify processor is + specified, the first extract processor will be + used. + processor_results_folder_path (str): + The Cloud Storage folder path used to store the raw results + from processors. Format: + ``gs:///``. 
+ skip_ingested_documents (bool): + The flag whether to skip ingested documents. + If it is set to true, documents in Cloud Storage + contains key "status" with value + "status=ingested" in custom metadata will be + skipped to ingest. + pipeline_config (google.cloud.contentwarehouse_v1.types.IngestPipelineConfig): + Optional. The config for the Cloud Storage + Ingestion with DocAI Processors pipeline. It + provides additional customization options to run + the pipeline and can be skipped if it is not + applicable. + """ + + input_path: str = proto.Field( + proto.STRING, + number=1, + ) + split_classify_processor_info: "ProcessorInfo" = proto.Field( + proto.MESSAGE, + number=2, + message="ProcessorInfo", + ) + extract_processor_infos: MutableSequence["ProcessorInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="ProcessorInfo", + ) + processor_results_folder_path: str = proto.Field( + proto.STRING, + number=4, + ) + skip_ingested_documents: bool = proto.Field( + proto.BOOL, + number=5, + ) + pipeline_config: "IngestPipelineConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="IngestPipelineConfig", + ) + + +class ExportToCdwPipeline(proto.Message): + r"""The configuration of exporting documents from the Document + Warehouse to CDW pipeline. + + Attributes: + documents (MutableSequence[str]): + The list of all the resource names of the documents to be + processed. Format: + projects/{project_number}/locations/{location}/documents/{document_id}. + export_folder_path (str): + The Cloud Storage folder path used to store the exported + documents before being sent to CDW. Format: + ``gs:///``. + doc_ai_dataset (str): + Optional. The CDW dataset resource name. This + field is optional. If not set, the documents + will be exported to Cloud Storage only. Format: + + projects/{project}/locations/{location}/processors/{processor}/dataset + training_split_ratio (float): + Ratio of training dataset split. 
When importing into + Document AI Workbench, documents will be automatically split + into training and test split category with the specified + ratio. This field is required if doc_ai_dataset is set. + """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + export_folder_path: str = proto.Field( + proto.STRING, + number=2, + ) + doc_ai_dataset: str = proto.Field( + proto.STRING, + number=3, + ) + training_split_ratio: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class ProcessWithDocAiPipeline(proto.Message): + r"""The configuration of processing documents in Document + Warehouse with DocAi processors pipeline. + + Attributes: + documents (MutableSequence[str]): + The list of all the resource names of the documents to be + processed. Format: + projects/{project_number}/locations/{location}/documents/{document_id}. + export_folder_path (str): + The Cloud Storage folder path used to store the exported + documents before being sent to CDW. Format: + ``gs:///``. + processor_info (google.cloud.contentwarehouse_v1.types.ProcessorInfo): + The CDW processor information. + processor_results_folder_path (str): + The Cloud Storage folder path used to store the raw results + from processors. Format: + ``gs:///``. 
+ """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + export_folder_path: str = proto.Field( + proto.STRING, + number=2, + ) + processor_info: "ProcessorInfo" = proto.Field( + proto.MESSAGE, + number=3, + message="ProcessorInfo", + ) + processor_results_folder_path: str = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py index 343578bf6d17..e5515c19201c 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/rule_engine.py @@ -113,10 +113,16 @@ class TriggerType(proto.Enum): Trigger for create document action. ON_UPDATE (4): Trigger for update document action. + ON_CREATE_LINK (7): + Trigger for create link action. + ON_DELETE_LINK (8): + Trigger for delete link action. """ UNKNOWN = 0 ON_CREATE = 1 ON_UPDATE = 4 + ON_CREATE_LINK = 7 + ON_DELETE_LINK = 8 description: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py index cc22e669c5d8..466033947f56 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/synonymset.py @@ -30,6 +30,7 @@ class SynonymSet(proto.Message): r"""Represents a list of synonyms for a given context. 
For example a context "sales" could contain: + Synonym 1: sale, invoice, bill, order Synonym 2: money, credit, finance, payment Synonym 3: shipping, freight, transport diff --git a/packages/google-cloud-contentwarehouse/noxfile.py b/packages/google-cloud-contentwarehouse/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-contentwarehouse/noxfile.py +++ b/packages/google-cloud-contentwarehouse/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py new file mode 100644 index 000000000000..4dc4b4a1eb21 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunPipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contentwarehouse + + +# [START contentwarehouse_v1_generated_PipelineService_RunPipeline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contentwarehouse_v1 + + +async def sample_run_pipeline(): + # Create a client + client = contentwarehouse_v1.PipelineServiceAsyncClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.RunPipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.run_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END contentwarehouse_v1_generated_PipelineService_RunPipeline_async] diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py new file mode 100644 index 000000000000..e8dbed8b7c08 --- /dev/null +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunPipeline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-contentwarehouse + + +# [START contentwarehouse_v1_generated_PipelineService_RunPipeline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import contentwarehouse_v1 + + +def sample_run_pipeline(): + # Create a client + client = contentwarehouse_v1.PipelineServiceClient() + + # Initialize request argument(s) + request = contentwarehouse_v1.RunPipelineRequest( + name="name_value", + ) + + # Make the request + operation = client.run_pipeline(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END contentwarehouse_v1_generated_PipelineService_RunPipeline_sync] diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json index 023fb3e96007..4d8ced4c0eb8 100644 --- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contentwarehouse", - "version": "0.1.0" + "version": "0.7.1" }, "snippets": [ { @@ -2778,6 +2778,167 @@ ], "title": "contentwarehouse_v1_generated_document_service_update_document_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceAsyncClient", + "shortName": "PipelineServiceAsyncClient" + }, + "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceAsyncClient.run_pipeline", + "method": { + "fullName": "google.cloud.contentwarehouse.v1.PipelineService.RunPipeline", + "service": { + "fullName": "google.cloud.contentwarehouse.v1.PipelineService", + "shortName": 
"PipelineService" + }, + "shortName": "RunPipeline" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contentwarehouse_v1.types.RunPipelineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "run_pipeline" + }, + "description": "Sample for RunPipeline", + "file": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contentwarehouse_v1_generated_PipelineService_RunPipeline_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceClient", + "shortName": "PipelineServiceClient" + }, + "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceClient.run_pipeline", + "method": { + "fullName": "google.cloud.contentwarehouse.v1.PipelineService.RunPipeline", + "service": { + "fullName": "google.cloud.contentwarehouse.v1.PipelineService", + "shortName": "PipelineService" + }, + "shortName": "RunPipeline" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.contentwarehouse_v1.types.RunPipelineRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "run_pipeline" + }, + "description": "Sample for RunPipeline", + "file": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "contentwarehouse_v1_generated_PipelineService_RunPipeline_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-contentwarehouse/scripts/decrypt-secrets.sh b/packages/google-cloud-contentwarehouse/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-contentwarehouse/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-contentwarehouse/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py b/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py index b2bb1f168d98..8509128f90a3 100644 --- a/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py +++ b/packages/google-cloud-contentwarehouse/scripts/fixup_contentwarehouse_v1_keywords.py @@ -60,6 +60,7 @@ class contentwarehouseCallTransformer(cst.CSTTransformer): 'list_rule_sets': ('parent', 'page_size', 'page_token', ), 'list_synonym_sets': ('parent', 'page_size', 'page_token', ), 'lock_document': ('name', 'collection_id', 'locking_user', ), + 'run_pipeline': ('name', 'gcs_ingest_pipeline', 'gcs_ingest_with_doc_ai_processors_pipeline', 'export_cdw_pipeline', 'process_with_doc_ai_pipeline', 'request_metadata', ), 'search_documents': ('parent', 'request_metadata', 'document_query', 'offset', 'page_size', 'page_token', 'order_by', 'histogram_queries', 'require_total_size', 'total_result_size', 'qa_size_limit', ), 'set_acl': ('resource', 'policy', 'request_metadata', 'project_owner', ), 'update_document': ('name', 'document', 'request_metadata', 'cloud_ai_document_option', 'update_options', ), diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index 6ede0f44e1ba..f7f983f00317 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -32,7 +32,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import 
service_account from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index ada0bb505086..607311aca24d 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -32,7 +32,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py index 560f8369fbac..af1342b48749 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py @@ -1024,6 +1024,7 @@ def test_get_document(request_type, transport: str = "grpc"): text_extraction_enabled=True, creator="creator_value", updater="updater_value", + legal_hold=True, plain_text="plain_text_value", raw_document_path="raw_document_path_value", ) @@ -1055,6 +1056,7 @@ def test_get_document(request_type, transport: str = "grpc"): assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" + assert response.legal_hold is True def 
test_get_document_empty_call(): @@ -1106,6 +1108,7 @@ async def test_get_document_async( text_extraction_enabled=True, creator="creator_value", updater="updater_value", + legal_hold=True, ) ) response = await client.get_document(request) @@ -1136,6 +1139,7 @@ async def test_get_document_async( assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" + assert response.legal_hold is True @pytest.mark.asyncio @@ -1767,6 +1771,7 @@ def test_search_documents(request_type, transport: str = "grpc"): call.return_value = document_service.SearchDocumentsResponse( next_page_token="next_page_token_value", total_size=1086, + question_answer="question_answer_value", ) response = client.search_documents(request) @@ -1779,6 +1784,7 @@ def test_search_documents(request_type, transport: str = "grpc"): assert isinstance(response, pagers.SearchDocumentsPager) assert response.next_page_token == "next_page_token_value" assert response.total_size == 1086 + assert response.question_answer == "question_answer_value" def test_search_documents_empty_call(): @@ -1818,6 +1824,7 @@ async def test_search_documents_async( document_service.SearchDocumentsResponse( next_page_token="next_page_token_value", total_size=1086, + question_answer="question_answer_value", ) ) response = await client.search_documents(request) @@ -1831,6 +1838,7 @@ async def test_search_documents_async( assert isinstance(response, pagers.SearchDocumentsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.total_size == 1086 + assert response.question_answer == "question_answer_value" @pytest.mark.asyncio @@ -2212,6 +2220,7 @@ def test_lock_document(request_type, transport: str = "grpc"): text_extraction_enabled=True, creator="creator_value", updater="updater_value", + legal_hold=True, plain_text="plain_text_value", raw_document_path="raw_document_path_value", ) @@ -2243,6 +2252,7 @@ def 
test_lock_document(request_type, transport: str = "grpc"): assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" + assert response.legal_hold is True def test_lock_document_empty_call(): @@ -2294,6 +2304,7 @@ async def test_lock_document_async( text_extraction_enabled=True, creator="creator_value", updater="updater_value", + legal_hold=True, ) ) response = await client.lock_document(request) @@ -2324,6 +2335,7 @@ async def test_lock_document_async( assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" + assert response.legal_hold is True @pytest.mark.asyncio @@ -3247,6 +3259,7 @@ def test_get_document_rest(request_type): text_extraction_enabled=True, creator="creator_value", updater="updater_value", + legal_hold=True, plain_text="plain_text_value", raw_document_path="raw_document_path_value", ) @@ -3282,6 +3295,7 @@ def test_get_document_rest(request_type): assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" + assert response.legal_hold is True def test_get_document_rest_required_fields( @@ -4072,6 +4086,7 @@ def test_search_documents_rest(request_type): return_value = document_service.SearchDocumentsResponse( next_page_token="next_page_token_value", total_size=1086, + question_answer="question_answer_value", ) # Wrap the value into a proper Response obj @@ -4088,6 +4103,7 @@ def test_search_documents_rest(request_type): assert isinstance(response, pagers.SearchDocumentsPager) assert response.next_page_token == "next_page_token_value" assert response.total_size == 1086 + assert response.question_answer == "question_answer_value" def test_search_documents_rest_required_fields( @@ -4416,6 +4432,7 @@ def test_lock_document_rest(request_type): text_extraction_enabled=True, creator="creator_value", updater="updater_value", + 
legal_hold=True, plain_text="plain_text_value", raw_document_path="raw_document_path_value", ) @@ -4451,6 +4468,7 @@ def test_lock_document_rest(request_type): assert response.text_extraction_enabled is True assert response.creator == "creator_value" assert response.updater == "updater_value" + assert response.legal_hold is True def test_lock_document_rest_required_fields( diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py new file mode 100644 index 000000000000..9e7df33c573c --- /dev/null +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py @@ -0,0 +1,2270 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +from google.type import expr_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.contentwarehouse_v1.services.pipeline_service import ( + PipelineServiceAsyncClient, + PipelineServiceClient, + transports, +) +from google.cloud.contentwarehouse_v1.types import common, pipeline_service, pipelines + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PipelineServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PipelineServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PipelineServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PipelineServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PipelineServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PipelineServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PipelineServiceClient, "grpc"), + (PipelineServiceAsyncClient, "grpc_asyncio"), + (PipelineServiceClient, "rest"), + ], +) +def test_pipeline_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.PipelineServiceGrpcTransport, "grpc"), + (transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PipelineServiceRestTransport, "rest"), + ], +) +def test_pipeline_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PipelineServiceClient, "grpc"), + (PipelineServiceAsyncClient, "grpc_asyncio"), + (PipelineServiceClient, "rest"), + ], +) +def test_pipeline_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) + + +def test_pipeline_service_client_get_transport_class(): + transport = 
PipelineServiceClient.get_transport_class() + available_transports = [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceRestTransport, + ] + assert transport in available_transports + + transport = PipelineServiceClient.get_transport_class("grpc") + assert transport == transports.PipelineServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc"), + ( + PipelineServiceAsyncClient, + transports.PipelineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PipelineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PipelineServiceClient), +) +@mock.patch.object( + PipelineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PipelineServiceAsyncClient), +) +def test_pipeline_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PipelineServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PipelineServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PipelineServiceClient, + transports.PipelineServiceGrpcTransport, + "grpc", + "true", + ), + ( + PipelineServiceAsyncClient, + transports.PipelineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
PipelineServiceClient, + transports.PipelineServiceGrpcTransport, + "grpc", + "false", + ), + ( + PipelineServiceAsyncClient, + transports.PipelineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PipelineServiceClient, + transports.PipelineServiceRestTransport, + "rest", + "true", + ), + ( + PipelineServiceClient, + transports.PipelineServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PipelineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PipelineServiceClient), +) +@mock.patch.object( + PipelineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PipelineServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_pipeline_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PipelineServiceClient, PipelineServiceAsyncClient] +) +@mock.patch.object( + PipelineServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PipelineServiceClient), +) +@mock.patch.object( + PipelineServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PipelineServiceAsyncClient), +) +def test_pipeline_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc"), + ( + PipelineServiceAsyncClient, + transports.PipelineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest"), + ], +) +def test_pipeline_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PipelineServiceClient, + transports.PipelineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PipelineServiceAsyncClient, + transports.PipelineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest", None), + ], +) +def test_pipeline_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_pipeline_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PipelineServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PipelineServiceClient, + transports.PipelineServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PipelineServiceAsyncClient, + transports.PipelineServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_pipeline_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "contentwarehouse.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="contentwarehouse.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pipeline_service.RunPipelineRequest, + dict, + ], +) +def test_run_pipeline(request_type, transport: str = "grpc"): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.run_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pipeline_service.RunPipelineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_run_pipeline_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + client.run_pipeline() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pipeline_service.RunPipelineRequest() + + +@pytest.mark.asyncio +async def test_run_pipeline_async( + transport: str = "grpc_asyncio", request_type=pipeline_service.RunPipelineRequest +): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.run_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pipeline_service.RunPipelineRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_run_pipeline_async_from_dict(): + await test_run_pipeline_async(request_type=dict) + + +def test_run_pipeline_field_headers(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pipeline_service.RunPipelineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.run_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_pipeline_field_headers_async(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pipeline_service.RunPipelineRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.run_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_run_pipeline_flattened(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_pipeline( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_run_pipeline_flattened_error(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_pipeline( + pipeline_service.RunPipelineRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_pipeline_flattened_async(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.run_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_pipeline( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_run_pipeline_flattened_error_async(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_pipeline( + pipeline_service.RunPipelineRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pipeline_service.RunPipelineRequest, + dict, + ], +) +def test_run_pipeline_rest(request_type): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_pipeline(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_run_pipeline_rest_required_fields( + request_type=pipeline_service.RunPipelineRequest, +): + transport_class = transports.PipelineServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_pipeline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_pipeline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_pipeline_rest_unset_required_fields(): + transport = transports.PipelineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_pipeline_rest_interceptors(null_interceptor): + transport = transports.PipelineServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PipelineServiceRestInterceptor(), + ) + client = PipelineServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.PipelineServiceRestInterceptor, "post_run_pipeline" + ) as post, mock.patch.object( + transports.PipelineServiceRestInterceptor, "pre_run_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pipeline_service.RunPipelineRequest.pb( + pipeline_service.RunPipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = pipeline_service.RunPipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.run_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_pipeline_rest_bad_request( + transport: str = "rest", request_type=pipeline_service.RunPipelineRequest +): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_pipeline(request) + + +def test_run_pipeline_rest_flattened(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_pipeline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}:runPipeline" % client.transport._host, + args[1], + ) + + +def test_run_pipeline_rest_flattened_error(transport: str = "rest"): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_pipeline( + pipeline_service.RunPipelineRequest(), + name="name_value", + ) + + +def test_run_pipeline_rest_error(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PipelineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PipelineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PipelineServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PipelineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PipelineServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PipelineServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.PipelineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PipelineServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PipelineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PipelineServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PipelineServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PipelineServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceGrpcAsyncIOTransport, + transports.PipelineServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PipelineServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PipelineServiceGrpcTransport, + ) + + +def test_pipeline_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PipelineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_pipeline_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PipelineServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "run_pipeline", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_pipeline_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + 
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PipelineServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_pipeline_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PipelineServiceTransport() + adc.assert_called_once() + + +def test_pipeline_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PipelineServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceGrpcAsyncIOTransport, + ], +) +def test_pipeline_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceGrpcAsyncIOTransport, + transports.PipelineServiceRestTransport, + ], +) +def test_pipeline_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PipelineServiceGrpcTransport, grpc_helpers), + (transports.PipelineServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_pipeline_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "contentwarehouse.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="contentwarehouse.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceGrpcAsyncIOTransport, + ], +) +def test_pipeline_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_pipeline_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PipelineServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_pipeline_service_rest_lro_client(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_pipeline_service_host_no_port(transport_name): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="contentwarehouse.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "contentwarehouse.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_pipeline_service_host_with_port(transport_name): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="contentwarehouse.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "contentwarehouse.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://contentwarehouse.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_pipeline_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PipelineServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PipelineServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.run_pipeline._session + session2 = client2.transport.run_pipeline._session + assert session1 != session2 + + +def test_pipeline_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PipelineServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_pipeline_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PipelineServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceGrpcAsyncIOTransport, + ], +) +def test_pipeline_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + 
"mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PipelineServiceGrpcTransport, + transports.PipelineServiceGrpcAsyncIOTransport, + ], +) +def test_pipeline_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_pipeline_service_grpc_lro_client(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_pipeline_service_grpc_lro_async_client(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cloud_function_path(): + project = "squid" + location = "clam" + function = "whelk" + expected = "projects/{project}/locations/{location}/functions/{function}".format( + project=project, + location=location, + function=function, + ) + actual = PipelineServiceClient.cloud_function_path(project, location, function) + assert expected == actual + + +def test_parse_cloud_function_path(): + expected = { + "project": "octopus", + "location": "oyster", + "function": "nudibranch", + } + path = PipelineServiceClient.cloud_function_path(**expected) + + # Check that the path construction is reversible. + actual = PipelineServiceClient.parse_cloud_function_path(path) + assert expected == actual + + +def test_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PipelineServiceClient.location_path(project, location) + assert expected == actual + + +def test_parse_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = PipelineServiceClient.location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PipelineServiceClient.parse_location_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PipelineServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = PipelineServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PipelineServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PipelineServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = PipelineServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PipelineServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PipelineServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = PipelineServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PipelineServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = PipelineServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = PipelineServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PipelineServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PipelineServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = PipelineServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PipelineServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PipelineServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PipelineServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PipelineServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = PipelineServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PipelineServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PipelineServiceClient, transports.PipelineServiceGrpcTransport), + (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index 9bf2a45b3e65..d6fd4f36cdb0 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -33,7 +33,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format from 
google.type import expr_pb2 # type: ignore diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index ad6b02ec2d99..b177af629e23 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -32,7 +32,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format import grpc diff --git a/packages/google-cloud-data-fusion/.flake8 b/packages/google-cloud-data-fusion/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-data-fusion/.flake8 +++ b/packages/google-cloud-data-fusion/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-fusion/CHANGELOG.md b/packages/google-cloud-data-fusion/CHANGELOG.md index 6696ce6bada3..909d20bc8bc1 100644 --- a/packages/google-cloud-data-fusion/CHANGELOG.md +++ b/packages/google-cloud-data-fusion/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.8.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-fusion-v1.8.2...google-cloud-data-fusion-v1.8.3) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [1.8.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-fusion-v1.8.1...google-cloud-data-fusion-v1.8.2) (2023-07-05) diff --git a/packages/google-cloud-data-fusion/CONTRIBUTING.rst b/packages/google-cloud-data-fusion/CONTRIBUTING.rst index 6e19229e3e0f..9d1493dfaf6f 100644 --- a/packages/google-cloud-data-fusion/CONTRIBUTING.rst +++ b/packages/google-cloud-data-fusion/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-data-fusion/noxfile.py ********** diff --git a/packages/google-cloud-data-fusion/MANIFEST.in b/packages/google-cloud-data-fusion/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-data-fusion/MANIFEST.in +++ b/packages/google-cloud-data-fusion/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-fusion/README.rst b/packages/google-cloud-data-fusion/README.rst index cd5e50984249..24fbd8fff5d7 100644 --- a/packages/google-cloud-data-fusion/README.rst +++ b/packages/google-cloud-data-fusion/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-data-fusion + pip install google-cloud-data-fusion Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-data-fusion + py -m venv + .\\Scripts\activate + pip install google-cloud-data-fusion Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-data-fusion/docs/conf.py b/packages/google-cloud-data-fusion/docs/conf.py index 1823b01efc5a..615516804535 100644 --- a/packages/google-cloud-data-fusion/docs/conf.py +++ b/packages/google-cloud-data-fusion/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py index 360a0d13ebdd..f9e85c87b4d6 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.8.3" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py index 360a0d13ebdd..f9e85c87b4d6 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.8.3" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/types/datafusion.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/types/datafusion.py index 16cf4fa3584c..859f2f628954 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/types/datafusion.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/types/datafusion.py @@ -303,6 +303,7 @@ class Instance(proto.Message): User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines. + This allows users to have fine-grained access control on Dataproc's accesses to cloud resources. diff --git a/packages/google-cloud-data-fusion/noxfile.py b/packages/google-cloud-data-fusion/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-data-fusion/noxfile.py +++ b/packages/google-cloud-data-fusion/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json index 1c5648e25de2..8a26b01caeaf 100644 --- a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json +++ b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-fusion", - "version": "0.1.0" + "version": "1.8.3" }, "snippets": [ { diff --git a/packages/google-cloud-data-fusion/scripts/decrypt-secrets.sh b/packages/google-cloud-data-fusion/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-data-fusion/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-data-fusion/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index 24b1626dee86..36394158ae12 100644 --- a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -41,7 +41,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-data-qna/.flake8 b/packages/google-cloud-data-qna/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-data-qna/.flake8 +++ b/packages/google-cloud-data-qna/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-qna/CHANGELOG.md b/packages/google-cloud-data-qna/CHANGELOG.md index 44bedbe585f2..6a7278330624 100644 --- a/packages/google-cloud-data-qna/CHANGELOG.md +++ b/packages/google-cloud-data-qna/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.10.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-qna-v0.10.3...google-cloud-data-qna-v0.10.4) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [0.10.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-qna-v0.10.2...google-cloud-data-qna-v0.10.3) (2023-07-05) diff --git a/packages/google-cloud-data-qna/CONTRIBUTING.rst b/packages/google-cloud-data-qna/CONTRIBUTING.rst index 2bab5db119cc..05519deb6e8f 100644 --- a/packages/google-cloud-data-qna/CONTRIBUTING.rst +++ b/packages/google-cloud-data-qna/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-data-qna/noxfile.py ********** diff --git a/packages/google-cloud-data-qna/MANIFEST.in b/packages/google-cloud-data-qna/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-data-qna/MANIFEST.in +++ b/packages/google-cloud-data-qna/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-qna/README.rst b/packages/google-cloud-data-qna/README.rst index ff4333f7ae7a..b82b929f1abf 100644 --- a/packages/google-cloud-data-qna/README.rst +++ b/packages/google-cloud-data-qna/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-data-qna + pip install google-cloud-data-qna Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-data-qna + py -m venv + .\\Scripts\activate + pip install google-cloud-data-qna Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-data-qna/docs/conf.py b/packages/google-cloud-data-qna/docs/conf.py index c07071a57b8f..3ceb0d2b1250 100644 --- a/packages/google-cloud-data-qna/docs/conf.py +++ b/packages/google-cloud-data-qna/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py index 360a0d13ebdd..0bfda03894ef 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.10.4" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py index 360a0d13ebdd..0bfda03894ef 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.10.4" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/types/question.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/types/question.py index 60986d534290..7c0c1144ddfc 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/types/question.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/types/question.py @@ -89,6 +89,7 @@ class Question(proto.Message): An error field explaining why interpretation failed. This is only populated if the interpretation failed. + Note: This is different from getting a status error on the request itself. This is not a client or server error and the Question resource diff --git a/packages/google-cloud-data-qna/noxfile.py b/packages/google-cloud-data-qna/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-data-qna/noxfile.py +++ b/packages/google-cloud-data-qna/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json index 2325b770fe09..97bfbed4eb54 100644 --- a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json +++ b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-qna", - "version": "0.1.0" + "version": "0.10.4" }, "snippets": [ { diff --git a/packages/google-cloud-data-qna/scripts/decrypt-secrets.sh b/packages/google-cloud-data-qna/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-data-qna/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-data-qna/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datacatalog-lineage/.flake8 b/packages/google-cloud-datacatalog-lineage/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-datacatalog-lineage/.flake8 +++ b/packages/google-cloud-datacatalog-lineage/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md index 15981d5898bc..d2d38a675423 100644 --- a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md +++ b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.2.3...google-cloud-datacatalog-lineage-v0.2.4) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [0.2.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.2.2...google-cloud-datacatalog-lineage-v0.2.3) (2023-07-05) diff --git a/packages/google-cloud-datacatalog-lineage/CONTRIBUTING.rst b/packages/google-cloud-datacatalog-lineage/CONTRIBUTING.rst index 69f7bffedeca..f36023350110 100644 --- a/packages/google-cloud-datacatalog-lineage/CONTRIBUTING.rst +++ b/packages/google-cloud-datacatalog-lineage/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-datacatalog-lineage/noxfile.py ********** diff --git a/packages/google-cloud-datacatalog-lineage/MANIFEST.in b/packages/google-cloud-datacatalog-lineage/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-datacatalog-lineage/MANIFEST.in +++ b/packages/google-cloud-datacatalog-lineage/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datacatalog-lineage/README.rst b/packages/google-cloud-datacatalog-lineage/README.rst index c9d70c6ed45b..c5012b1762a7 100644 --- a/packages/google-cloud-datacatalog-lineage/README.rst +++ b/packages/google-cloud-datacatalog-lineage/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-datacatalog-lineage + pip install google-cloud-datacatalog-lineage Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-datacatalog-lineage + py -m venv + .\\Scripts\activate + pip install google-cloud-datacatalog-lineage Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-datacatalog-lineage/docs/conf.py b/packages/google-cloud-datacatalog-lineage/docs/conf.py index 2cddbd2744b3..ccc9bc681d0e 100644 --- a/packages/google-cloud-datacatalog-lineage/docs/conf.py +++ b/packages/google-cloud-datacatalog-lineage/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py index 360a0d13ebdd..1209889848a4 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py index 360a0d13ebdd..1209889848a4 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/async_client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/async_client.py index 9930c90ac4ec..91d19c239b22 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/async_client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/async_client.py @@ -44,7 +44,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/client.py index d034b0bab6b4..a49bca479940 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/client.py @@ -48,7 +48,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git 
a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py index 0d855cc91e90..3f3ceb009426 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog/lineage_v1/services/lineage/transports/rest.py @@ -32,7 +32,6 @@ from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-datacatalog-lineage/noxfile.py b/packages/google-cloud-datacatalog-lineage/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-datacatalog-lineage/noxfile.py +++ b/packages/google-cloud-datacatalog-lineage/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json index 8187e1a0307b..4bc664bf4242 100644 --- a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json +++ b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog-lineage", - "version": "0.1.0" + "version": "0.2.4" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog-lineage/scripts/decrypt-secrets.sh b/packages/google-cloud-datacatalog-lineage/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-datacatalog-lineage/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-datacatalog-lineage/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/lineage_v1/test_lineage.py b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/lineage_v1/test_lineage.py index ff4f3a3ecf5b..949c6ce67c82 100644 --- a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/lineage_v1/test_lineage.py +++ b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/lineage_v1/test_lineage.py @@ -41,7 +41,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/.flake8 b/packages/google-cloud-datacatalog/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-datacatalog/.flake8 +++ b/packages/google-cloud-datacatalog/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datacatalog/CHANGELOG.md b/packages/google-cloud-datacatalog/CHANGELOG.md index 6f8fb061c017..191d61a2824a 100644 --- a/packages/google-cloud-datacatalog/CHANGELOG.md +++ b/packages/google-cloud-datacatalog/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## [3.15.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.15.1...google-cloud-datacatalog-v3.15.2) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [3.15.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.15.0...google-cloud-datacatalog-v3.15.1) (2023-08-31) + + +### Documentation + +* fix typo ([#11600](https://github.com/googleapis/google-cloud-python/issues/11600)) ([cc8021a](https://github.com/googleapis/google-cloud-python/commit/cc8021ab449c4be5346afbee42de573e812dc274)) + +## [3.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.14.0...google-cloud-datacatalog-v3.15.0) (2023-08-09) + + +### Features + +* add support for admin_search in SearchCatalog() API method ([#11539](https://github.com/googleapis/google-cloud-python/issues/11539)) ([6dced1a](https://github.com/googleapis/google-cloud-python/commit/6dced1ad7fa20372241b7384cfbeabf0ad6f5e25)) + ## [3.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.13.1...google-cloud-datacatalog-v3.14.0) (2023-07-10) diff --git a/packages/google-cloud-datacatalog/CONTRIBUTING.rst b/packages/google-cloud-datacatalog/CONTRIBUTING.rst index bcd645b0768a..597e8dd95079 100644 --- a/packages/google-cloud-datacatalog/CONTRIBUTING.rst +++ b/packages/google-cloud-datacatalog/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-datacatalog/noxfile.py ********** diff --git a/packages/google-cloud-datacatalog/MANIFEST.in b/packages/google-cloud-datacatalog/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-datacatalog/MANIFEST.in +++ b/packages/google-cloud-datacatalog/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datacatalog/README.rst b/packages/google-cloud-datacatalog/README.rst index 7860e68aa54c..20f66fde4381 100644 --- a/packages/google-cloud-datacatalog/README.rst +++ b/packages/google-cloud-datacatalog/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. 
_`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-datacatalog + pip install google-cloud-datacatalog Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-datacatalog + py -m venv + .\\Scripts\activate + pip install google-cloud-datacatalog Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-datacatalog/docs/conf.py b/packages/google-cloud-datacatalog/docs/conf.py index 930ad6762a5b..8276938ba7df 100644 --- a/packages/google-cloud-datacatalog/docs/conf.py +++ b/packages/google-cloud-datacatalog/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index cd6578cfc6b0..dcf8378be674 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.14.0" # {x-release-please-version} +__version__ = "3.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index cd6578cfc6b0..dcf8378be674 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.14.0" # {x-release-please-version} +__version__ = "3.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 7efcfaf1bd0f..e8d81173595f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -46,7 +46,7 @@ from google.api_core import operation_async # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.datacatalog_v1.services.data_catalog import pagers @@ -472,6 +472,7 @@ async def sample_create_entry_group(): Required. The names of the project and location that the new entry group belongs to. + Note: The entry group itself and its child resources might not be stored in the location specified in its name. 
@@ -1544,6 +1545,7 @@ async def lookup_entry( metadata: Sequence[Tuple[str, str]] = (), ) -> datacatalog.Entry: r"""Gets an entry by its target resource name. + The resource name comes from the source Google Cloud Platform service. @@ -2835,6 +2837,7 @@ async def rename_tag_template_field_enum_value( metadata: Sequence[Tuple[str, str]] = (), ) -> tags.TagTemplateField: r"""Renames an enum value in a tag template. + Within a single enum field, enum values must be unique. .. code-block:: python @@ -3946,42 +3949,11 @@ async def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - 
roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -4134,42 +4106,11 @@ async def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - 
members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -4229,8 +4170,10 @@ async def test_iam_permissions( metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Gets your permissions on a resource. + Returns an empty set of permissions if the resource doesn't exist. + Supported resources are: - Tag templates @@ -4240,6 +4183,7 @@ async def test_iam_permissions( and can't be used to get policies from BigQuery, Pub/Sub, Dataproc Metastore, and any external Google Cloud Platform resources ingested into Data Catalog. + No Google IAM permissions are required to call this method. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index 9a283c1a6bb8..d6f5feb4c4d9 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -50,7 +50,7 @@ from google.api_core import operation_async # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.datacatalog_v1.services.data_catalog import pagers @@ -809,6 +809,7 @@ def sample_create_entry_group(): Required. The names of the project and location that the new entry group belongs to. + Note: The entry group itself and its child resources might not be stored in the location specified in its name. @@ -1881,6 +1882,7 @@ def lookup_entry( metadata: Sequence[Tuple[str, str]] = (), ) -> datacatalog.Entry: r"""Gets an entry by its target resource name. + The resource name comes from the source Google Cloud Platform service. @@ -3181,6 +3183,7 @@ def rename_tag_template_field_enum_value( metadata: Sequence[Tuple[str, str]] = (), ) -> tags.TagTemplateField: r"""Renames an enum value in a tag template. + Within a single enum field, enum values must be unique. .. 
code-block:: python @@ -4297,42 +4300,11 @@ def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: 
roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -4482,42 +4454,11 @@ def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` 
bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -4574,8 +4515,10 @@ def test_iam_permissions( metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Gets your permissions on a resource. + Returns an empty set of permissions if the resource doesn't exist. + Supported resources are: - Tag templates @@ -4585,6 +4528,7 @@ def test_iam_permissions( and can't be used to get policies from BigQuery, Pub/Sub, Dataproc Metastore, and any external Google Cloud Platform resources ingested into Data Catalog. + No Google IAM permissions are required to call this method. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index d0eb0c98bcca..fc1d95831d1a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -597,6 +597,7 @@ def lookup_entry( r"""Return a callable for the lookup entry method over gRPC. Gets an entry by its target resource name. + The resource name comes from the source Google Cloud Platform service. @@ -938,6 +939,7 @@ def rename_tag_template_field_enum_value( value method over gRPC. Renames an enum value in a tag template. + Within a single enum field, enum values must be unique. 
Returns: @@ -1305,8 +1307,10 @@ def test_iam_permissions( r"""Return a callable for the test iam permissions method over gRPC. Gets your permissions on a resource. + Returns an empty set of permissions if the resource doesn't exist. + Supported resources are: - Tag templates @@ -1316,6 +1320,7 @@ def test_iam_permissions( and can't be used to get policies from BigQuery, Pub/Sub, Dataproc Metastore, and any external Google Cloud Platform resources ingested into Data Catalog. + No Google IAM permissions are required to call this method. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index c56253eac12a..f51e2f017591 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -611,6 +611,7 @@ def lookup_entry( r"""Return a callable for the lookup entry method over gRPC. Gets an entry by its target resource name. + The resource name comes from the source Google Cloud Platform service. @@ -965,6 +966,7 @@ def rename_tag_template_field_enum_value( value method over gRPC. Renames an enum value in a tag template. + Within a single enum field, enum values must be unique. Returns: @@ -1348,8 +1350,10 @@ def test_iam_permissions( r"""Return a callable for the test iam permissions method over gRPC. Gets your permissions on a resource. + Returns an empty set of permissions if the resource doesn't exist. + Supported resources are: - Tag templates @@ -1359,6 +1363,7 @@ def test_iam_permissions( and can't be used to get policies from BigQuery, Pub/Sub, Dataproc Metastore, and any external Google Cloud Platform resources ingested into Data Catalog. + No Google IAM permissions are required to call this method. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py index d40db6609330..53187048b349 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -44,7 +44,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1.types import policytagmanager, timestamps @@ -237,6 +237,7 @@ async def create_taxonomy( metadata: Sequence[Tuple[str, str]] = (), ) -> policytagmanager.Taxonomy: r"""Creates a taxonomy in a specified project. + The taxonomy is initially empty, that is, it doesn't contain policy tags. 
@@ -1406,42 +1407,11 @@ async def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer 
condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -1548,42 +1518,11 @@ async def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - 
user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index 770de2cef70e..ac40ce625ea0 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -48,7 +48,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1.types import policytagmanager, timestamps @@ -486,6 +486,7 @@ def create_taxonomy( metadata: Sequence[Tuple[str, str]] = (), ) -> policytagmanager.Taxonomy: r"""Creates a taxonomy in a specified project. + The taxonomy is initially empty, that is, it doesn't contain policy tags. 
@@ -1655,42 +1656,11 @@ def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer 
condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -1796,42 +1766,11 @@ def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - 
group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py index f5f27c634949..aa4a37bded43 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py @@ -24,7 +24,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py index 651d240acf75..1ef85d20146a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py @@ -22,7 +22,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 
import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore @@ -244,6 +244,7 @@ def create_taxonomy( r"""Return a callable for the create taxonomy method over gRPC. Creates a taxonomy in a specified project. + The taxonomy is initially empty, that is, it doesn't contain policy tags. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py index 616deac6c31a..9a0e7982d9a2 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -21,7 +21,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -249,6 +249,7 @@ def create_taxonomy( r"""Return a callable for the create taxonomy method over gRPC. Creates a taxonomy in a specified project. + The taxonomy is initially empty, that is, it doesn't contain policy tags. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py index d0579f7548a8..ce772eef6bd7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py @@ -44,7 +44,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1.types import ( policytagmanager, @@ -61,6 +61,7 @@ class PolicyTagManagerSerializationAsyncClient: """Policy Tag Manager Serialization API service allows you to manipulate your policy tags and taxonomies in a serialized format. + Taxonomy is a hierarchical group of policy tags. """ @@ -355,6 +356,7 @@ async def import_taxonomies( r"""Creates new taxonomies (including their policy tags) in a given project by importing from inlined or cross-regional sources. + For a cross-regional source, new taxonomies are created by copying from a source in another region. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py index 9a2b36285116..39cc48ee9045 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -48,7 +48,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1.types import ( policytagmanager, @@ -105,6 +105,7 @@ class PolicyTagManagerSerializationClient( """Policy Tag Manager Serialization API service allows you to manipulate your policy tags and taxonomies in a serialized format. + Taxonomy is a hierarchical group of policy tags. """ @@ -580,6 +581,7 @@ def import_taxonomies( r"""Creates new taxonomies (including their policy tags) in a given project by importing from inlined or cross-regional sources. + For a cross-regional source, new taxonomies are created by copying from a source in another region. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py index f83b672a85f5..a00cbf728d0e 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py @@ -24,7 +24,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.datacatalog_v1 import gapic_version as package_version diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py index 80a2f27fef35..a21f9f7d0f7a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py @@ -22,7 +22,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.datacatalog_v1.types import ( @@ -41,6 +41,7 @@ class PolicyTagManagerSerializationGrpcTransport( 
Policy Tag Manager Serialization API service allows you to manipulate your policy tags and taxonomies in a serialized format. + Taxonomy is a hierarchical group of policy tags. This class defines the same methods as the primary client, so the @@ -292,6 +293,7 @@ def import_taxonomies( Creates new taxonomies (including their policy tags) in a given project by importing from inlined or cross-regional sources. + For a cross-regional source, new taxonomies are created by copying from a source in another region. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py index 41046a88573c..c69e6cf944fc 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -21,7 +21,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -42,6 +42,7 @@ class PolicyTagManagerSerializationGrpcAsyncIOTransport( Policy Tag Manager Serialization API service allows you to manipulate your policy tags and taxonomies in a serialized format. + Taxonomy is a hierarchical group of policy tags. This class defines the same methods as the primary client, so the @@ -295,6 +296,7 @@ def import_taxonomies( Creates new taxonomies (including their policy tags) in a given project by importing from inlined or cross-regional sources. 
+ For a cross-regional source, new taxonomies are created by copying from a source in another region. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py index 731b0547f7c8..70df39034914 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py @@ -238,6 +238,14 @@ class SearchCatalogRequest(proto.Message): If this parameter is omitted, it defaults to the descending ``relevance``. + admin_search (bool): + Optional. If set, use searchAll permission granted on + organizations from ``include_org_ids`` and projects from + ``include_project_ids`` instead of the fine grained per + resource permissions when filtering the search results. The + only allowed ``order_by`` criteria for admin_search mode is + ``default``. Using this flags guarantees a full recall of + the search results. """ class Scope(proto.Message): @@ -332,6 +340,10 @@ class Scope(proto.Message): proto.STRING, number=5, ) + admin_search: bool = proto.Field( + proto.BOOL, + number=17, + ) class SearchCatalogResponse(proto.Message): @@ -389,6 +401,7 @@ class CreateEntryGroupRequest(proto.Message): parent (str): Required. The names of the project and location that the new entry group belongs to. + Note: The entry group itself and its child resources might not be stored in the location specified in its name. @@ -930,12 +943,14 @@ class Entry(proto.Message): This field is a member of `oneof`_ ``spec``. display_name (str): Display name of an entry. + The maximum size is 500 bytes when encoded in UTF-8. Default value is an empty string. description (str): Entry description that can consist of several sentences or paragraphs that describe entry contents. 
+ The description must not contain Unicode non-characters as well as C0 and C1 control codes except tabs (HT), new lines (LF), carriage @@ -960,6 +975,7 @@ class Entry(proto.Message): Resource usage statistics. labels (MutableMapping[str, str]): Cloud labels attached to the entry. + In Data Catalog, you can create and modify labels attached only to custom entries. Synced entries have unmodifiable labels that come from @@ -1579,6 +1595,7 @@ class EntryOverview(proto.Message): Attributes: overview (str): Entry overview with support for rich text. + The overview must only contain Unicode characters, and should be formatted using HTML. The maximum length is 10 MiB as this value holds diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dataplex_spec.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dataplex_spec.py index e18239f03712..4e87bcb8684e 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dataplex_spec.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dataplex_spec.py @@ -95,6 +95,7 @@ class DataplexTableSpec(proto.Message): List of external tables registered by Dataplex in other systems based on the same underlying data. + External tables allow to query this data in those systems. dataplex_spec (google.cloud.datacatalog_v1.types.DataplexSpec): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py index e743381a5a26..d0fd1e9c0d45 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -77,6 +77,7 @@ class Taxonomy(proto.Message): taxonomy IDs. display_name (str): Required. User-defined name of this taxonomy. 
+ The name can't start or end with spaces, must contain only Unicode letters, numbers, underscores, dashes, and spaces, and be at most @@ -213,6 +214,7 @@ class PolicyTag(proto.Message): Resource name of this policy tag's parent policy tag. If empty, this is a top level tag. If not set, defaults to an empty string. + For example, for the "LatLong" policy tag in the example above, this field contains the resource name of the "Geolocation" policy tag, and, for @@ -329,6 +331,7 @@ class ListTaxonomiesRequest(proto.Message): page_token (str): The pagination token of the next results page. If not set, the first page is returned. + The token is returned in the response to a previous list request. filter (str): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/schema.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/schema.py index e1a44e5cf043..267419c69a6a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/schema.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/schema.py @@ -35,6 +35,7 @@ class Schema(proto.Message): Attributes: columns (MutableSequence[google.cloud.datacatalog_v1.types.ColumnSchema]): The unified GoogleSQL-like schema of columns. + The overall maximum number of columns and nested columns is 10,000. The maximum nested depth is 15 levels. @@ -57,10 +58,12 @@ class ColumnSchema(proto.Message): Attributes: column (str): Required. Name of the column. + Must be a UTF-8 string without dots (.). The maximum size is 64 bytes. type_ (str): Required. Type of the column. + Must be a UTF-8 string with the maximum size of 128 bytes. 
description (str): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/search.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/search.py index f4045e8505d4..6f53c98a59c2 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/search.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/search.py @@ -52,6 +52,7 @@ class SearchResultType(proto.Enum): class SearchCatalogResult(proto.Message): r"""Result in the response to a search request. + Each result captures details of one entry that matches the search. @@ -65,6 +66,7 @@ class SearchCatalogResult(proto.Message): Attributes: search_result_type (google.cloud.datacatalog_v1.types.SearchResultType): Type of the search result. + You can use this field to determine which get method to call to fetch the full resource. search_result_subtype (str): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py index 99ba926f5c8e..473fc4d74d61 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py @@ -48,6 +48,7 @@ class Tag(proto.Message): name (str): The resource name of the tag in URL format where tag ID is a system-generated identifier. + Note: The tag itself might not be stored in the location specified in its name. template (str): @@ -73,6 +74,7 @@ class Tag(proto.Message): Required. Maps the ID of a tag field to its value and additional information about that field. + Tag template defines valid field IDs. A tag must have at least 1 field and at most 500 fields. @@ -123,6 +125,7 @@ class TagField(proto.Message): This field is a member of `oneof`_ ``kind``. string_value (str): The value of a tag field with a string type. + The maximum length is 2000 UTF-8 characters. 
This field is a member of `oneof`_ ``kind``. @@ -137,6 +140,7 @@ class TagField(proto.Message): This field is a member of `oneof`_ ``kind``. enum_value (google.cloud.datacatalog_v1.types.TagField.EnumValue): The value of a tag field with an enum type. + This value must be one of the allowed values listed in this enum. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/timestamps.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/timestamps.py index a58e61652bb6..30f59e59fa87 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/timestamps.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/timestamps.py @@ -39,6 +39,7 @@ class SystemTimestamps(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Timestamp of the last modification of the resource or its metadata within a given system. + Note: Depending on the source system, not every modification updates this timestamp. For example, BigQuery timestamps every metadata diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/usage.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/usage.py index 3c09310fd089..4e4a96af0e2c 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/usage.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/usage.py @@ -77,6 +77,7 @@ class UsageStats(proto.Message): class CommonUsageStats(proto.Message): r"""Common statistics on the entry's usage. + They can be set on any system. @@ -98,6 +99,7 @@ class CommonUsageStats(proto.Message): class UsageSignal(proto.Message): r"""The set of all usage signals that Data Catalog stores. + Note: Usually, these signals are updated daily. In rare cases, an update may fail but will be performed again on the next day. 
diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index cd6578cfc6b0..dcf8378be674 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.14.0" # {x-release-please-version} +__version__ = "3.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index 84f1e0785417..c93b8b5330fe 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -44,7 +44,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers @@ -3418,42 +3418,11 @@ async def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - 
Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -3603,42 +3572,11 @@ async def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - 
"serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM diff --git 
a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 3857c7574a2b..788784faffdc 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -48,7 +48,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers @@ -3766,42 +3766,11 @@ def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", 
"version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -3948,42 +3917,11 @@ def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", 
"members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py index 07a3239d6c91..cb35637799b8 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py @@ -24,7 +24,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from 
google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py index fc30e73e4ba7..a8b4e77fcebb 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py @@ -22,7 +22,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py index 148744e4ea19..84fc13948c7b 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py @@ -21,7 +21,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # 
type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index 94fec3a4b92e..fa877fd9ba2d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -44,7 +44,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers from google.cloud.datacatalog_v1beta1.types import policytagmanager, timestamps @@ -825,11 +825,12 @@ async def sample_create_policy_tag(): Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider - the following hierarchy: Geolocation - -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three - child policy tags: "LatLong", "City", - and "ZipCode". + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -1027,11 +1028,12 @@ async def sample_update_policy_tag(): Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider - the following hierarchy: Geolocation - -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three - child policy tags: "LatLong", "City", - and "ZipCode". + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). 
PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -1252,11 +1254,12 @@ async def sample_get_policy_tag(): Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider - the following hierarchy: Geolocation - -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three - child policy tags: "LatLong", "City", - and "ZipCode". + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -1370,42 +1373,11 @@ async def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ 
admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -1512,42 +1484,11 @@ async def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": 
"Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 980ce0887323..06b7bf99d10a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -48,7 +48,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import 
pagers from google.cloud.datacatalog_v1beta1.types import policytagmanager, timestamps @@ -1074,11 +1074,12 @@ def sample_create_policy_tag(): Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider - the following hierarchy: Geolocation - -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three - child policy tags: "LatLong", "City", - and "ZipCode". + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -1276,11 +1277,12 @@ def sample_update_policy_tag(): Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider - the following hierarchy: Geolocation - -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three - child policy tags: "LatLong", "City", - and "ZipCode". + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. @@ -1501,11 +1503,12 @@ def sample_get_policy_tag(): Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider - the following hierarchy: Geolocation - -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three - child policy tags: "LatLong", "City", - and "ZipCode". + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". """ # Create or coerce a protobuf request object. 
@@ -1619,42 +1622,11 @@ def sample_get_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer 
condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM @@ -1760,42 +1732,11 @@ def sample_set_iam_policy(): **JSON example:** - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": [ "user:eve@example.com" ], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ], "etag": "BwWWja0YfJA=", "version": 3 - - } + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` **YAML example:** - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') etag: - BwWWja0YfJA= version: 3 + :literal:`\` bindings: - members: - user:mike@example.com - 
group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` For a description of IAM and its features, see the [IAM diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py index 7bb77a9dd71a..77eed922b27d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py @@ -24,7 +24,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py index 0987d4926462..7a814ee0c4c4 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py @@ -22,7 +22,7 @@ from google.auth.transport.grpc import 
SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py index 757f7a9db35c..5459e76f4ba2 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -21,7 +21,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py index 9bac57eba529..f9aad33b4ff7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -44,7 +44,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # 
type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1beta1.types import ( policytagmanager, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index c7c3434d25ae..9490445b0458 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -48,7 +48,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.cloud.datacatalog_v1beta1.types import ( policytagmanager, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py index 8c38b8d57a86..600a5d198b8f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py @@ -24,7 +24,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from 
google.cloud.datacatalog_v1beta1 import gapic_version as package_version diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py index ec83c0c91519..85aa40672526 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py @@ -22,7 +22,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py index 25dc56b557ab..a15b1c9d9f3f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -21,7 +21,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore import grpc # type: 
ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py index d67b22934ed7..3861cef6226d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanager.py @@ -61,6 +61,7 @@ class Taxonomy(proto.Message): underscores, dashes and spaces; not start or end with spaces; and be at most 200 bytes long when encoded in UTF-8. + The taxonomy display name must be unique within an organization. description (str): @@ -156,9 +157,11 @@ class Service(proto.Message): class PolicyTag(proto.Message): r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags can be defined in a hierarchy. For example, consider the - following hierarchy: Geolocation -> (LatLong, City, ZipCode). - PolicyTag "Geolocation" contains three child policy tags: - "LatLong", "City", and "ZipCode". + following hierarchy: + + Geolocation -> (LatLong, City, ZipCode). PolicyTag + "Geolocation" contains three child policy tags: "LatLong", + "City", and "ZipCode". Attributes: name (str): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py index c979086d4184..076604d2770e 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py @@ -85,6 +85,7 @@ class SerializedPolicyTag(proto.Message): Attributes: policy_tag (str): Resource name of the policy tag. + This field will be ignored when calling ImportTaxonomies. 
display_name (str): diff --git a/packages/google-cloud-datacatalog/noxfile.py b/packages/google-cloud-datacatalog/noxfile.py index 369d391524fc..9a2acd8b6787 100644 --- a/packages/google-cloud-datacatalog/noxfile.py +++ b/packages/google-cloud-datacatalog/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -381,6 +381,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -393,7 +394,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) @@ -402,6 +402,7 @@ def prerelease_deps(session): "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" ) session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index d8685e424865..187663629fc1 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.14.0" + "version": "3.15.2" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json 
b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index 1d2938166b61..8872ba807a0a 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.14.0" + "version": "3.15.2" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/scripts/decrypt-secrets.sh b/packages/google-cloud-datacatalog/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-datacatalog/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-datacatalog/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py b/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py index fcc2bc853fb6..7fe4359520fe 100644 --- a/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py +++ b/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py @@ -74,7 +74,7 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), 'replace_taxonomy': ('name', 'serialized_taxonomy', ), - 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), + 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', 'admin_search', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'star_entry': ('name', ), 'test_iam_permissions': ('resource', 'permissions', ), diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 67adb45cc120..69c5919b5aa6 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -42,7 +42,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py 
b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index 2a28ed275139..76b9958c5ce6 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -33,7 +33,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py index bd6fe0651a17..abc4b0bd7c10 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -33,7 +33,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 07bca2f3c53e..30ec846f4058 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ 
b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -33,7 +33,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index c73aae406a3a..911cb937ae36 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -33,7 +33,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index 406c980f88dd..bb8a000a7e50 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -33,7 +33,7 @@ from google.iam.v1 
import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-dataform/.flake8 b/packages/google-cloud-dataform/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-dataform/.flake8 +++ b/packages/google-cloud-dataform/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-dataform/CHANGELOG.md b/packages/google-cloud-dataform/CHANGELOG.md index 6162eff25b56..229138e9edf0 100644 --- a/packages/google-cloud-dataform/CHANGELOG.md +++ b/packages/google-cloud-dataform/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.5.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataform-v0.5.2...google-cloud-dataform-v0.5.3) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [0.5.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataform-v0.5.1...google-cloud-dataform-v0.5.2) (2023-07-05) diff --git a/packages/google-cloud-dataform/CONTRIBUTING.rst b/packages/google-cloud-dataform/CONTRIBUTING.rst index c29402786578..61d8c121d485 100644 --- a/packages/google-cloud-dataform/CONTRIBUTING.rst +++ b/packages/google-cloud-dataform/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-dataform/noxfile.py ********** diff --git a/packages/google-cloud-dataform/MANIFEST.in b/packages/google-cloud-dataform/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-dataform/MANIFEST.in +++ b/packages/google-cloud-dataform/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-dataform/README.rst b/packages/google-cloud-dataform/README.rst index cf206f7e2436..eef26e0af80b 100644 --- a/packages/google-cloud-dataform/README.rst +++ b/packages/google-cloud-dataform/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. 
_samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-dataform + pip install google-cloud-dataform Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-dataform + py -m venv + .\\Scripts\activate + pip install google-cloud-dataform Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-dataform/docs/conf.py b/packages/google-cloud-dataform/docs/conf.py index b46b666d031c..fabebb444ced 100644 --- a/packages/google-cloud-dataform/docs/conf.py +++ b/packages/google-cloud-dataform/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py index 360a0d13ebdd..7951a72dc56f 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.5.3" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py index 360a0d13ebdd..7951a72dc56f 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.5.3" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/types/dataform.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/types/dataform.py index a79a1b8367b9..02f96ec4d23e 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/types/dataform.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/types/dataform.py @@ -1197,6 +1197,7 @@ class CodeCompilationConfig(proto.Message): Optional. The default BigQuery location to use. Defaults to "US". See the BigQuery docs for a full list of locations: + https://cloud.google.com/bigquery/docs/locations. assertion_schema (str): Optional. The default schema (BigQuery diff --git a/packages/google-cloud-dataform/noxfile.py b/packages/google-cloud-dataform/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-dataform/noxfile.py +++ b/packages/google-cloud-dataform/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json index acdf6fbdc918..3c8b8ebf877c 100644 --- a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json +++ b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataform", - "version": "0.1.0" + "version": "0.5.3" }, "snippets": [ { diff --git a/packages/google-cloud-dataform/scripts/decrypt-secrets.sh b/packages/google-cloud-dataform/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-dataform/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-dataform/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datalabeling/.flake8 b/packages/google-cloud-datalabeling/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-datalabeling/.flake8 +++ b/packages/google-cloud-datalabeling/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datalabeling/CHANGELOG.md b/packages/google-cloud-datalabeling/CHANGELOG.md index 775d86aa3f73..c265e3f31b28 100644 --- a/packages/google-cloud-datalabeling/CHANGELOG.md +++ b/packages/google-cloud-datalabeling/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-datalabeling/#history +## [1.8.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datalabeling-v1.8.3...google-cloud-datalabeling-v1.8.4) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + ## [1.8.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datalabeling-v1.8.2...google-cloud-datalabeling-v1.8.3) (2023-07-05) diff --git a/packages/google-cloud-datalabeling/CONTRIBUTING.rst b/packages/google-cloud-datalabeling/CONTRIBUTING.rst index 0a81ae7bf812..ee7951b5ff8e 100644 --- a/packages/google-cloud-datalabeling/CONTRIBUTING.rst +++ b/packages/google-cloud-datalabeling/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-datalabeling/noxfile.py ********** diff --git a/packages/google-cloud-datalabeling/MANIFEST.in b/packages/google-cloud-datalabeling/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-datalabeling/MANIFEST.in +++ b/packages/google-cloud-datalabeling/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datalabeling/README.rst b/packages/google-cloud-datalabeling/README.rst index b91eb79e3aee..bfd657b3d245 100644 --- a/packages/google-cloud-datalabeling/README.rst +++ b/packages/google-cloud-datalabeling/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-datalabeling + pip install google-cloud-datalabeling Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-datalabeling + py -m venv + .\\Scripts\activate + pip install google-cloud-datalabeling Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-datalabeling/docs/conf.py b/packages/google-cloud-datalabeling/docs/conf.py index 3f34bc9caba8..2f8190d731ce 100644 --- a/packages/google-cloud-datalabeling/docs/conf.py +++ b/packages/google-cloud-datalabeling/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py index 360a0d13ebdd..d618d5ce9750 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.8.4" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py index 360a0d13ebdd..d618d5ce9750 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.8.4" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py index 97c40b56052b..034ad3a3fbba 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py @@ -3477,6 +3477,7 @@ async def sample_search_evaluations(): Returns: google.cloud.datalabeling_v1beta1.services.data_labeling_service.pagers.SearchEvaluationsAsyncPager: Results of searching evaluations. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4407,6 +4408,7 @@ async def sample_list_evaluation_jobs(): Returns: google.cloud.datalabeling_v1beta1.services.data_labeling_service.pagers.ListEvaluationJobsAsyncPager: Results for listing evaluation jobs. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py index 402a77c46897..2ac0d46a03d9 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py @@ -3659,6 +3659,7 @@ def sample_search_evaluations(): Returns: google.cloud.datalabeling_v1beta1.services.data_labeling_service.pagers.SearchEvaluationsPager: Results of searching evaluations. 
+ Iterating over this object will yield results and resolve additional pages automatically. @@ -4563,6 +4564,7 @@ def sample_list_evaluation_jobs(): Returns: google.cloud.datalabeling_v1beta1.services.data_labeling_service.pagers.ListEvaluationJobsPager: Results for listing evaluation jobs. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py index 72319df20fc4..15593037b379 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py @@ -100,6 +100,7 @@ class HumanAnnotationConfig(proto.Message): will give them access to the question types in crowdcompute. Note that these emails must be registered in crowdcompute worker UI: + https://crowd-compute.appspot.com/ user_email_address (str): Email of the user who started the labeling @@ -241,10 +242,11 @@ class SegmentationConfig(proto.Message): class VideoClassificationConfig(proto.Message): r"""Config for video classification human labeling task. - Currently two types of video classification are supported: 1. - Assign labels on the entire video. + Currently two types of video classification are supported: + + 1. Assign labels on the entire video. 2. Split the video into multiple video clips based on camera - shot, and assign labels on each video clip. + shot, and assign labels on each video clip. 
Attributes: annotation_spec_set_configs (MutableSequence[google.cloud.datalabeling_v1beta1.types.VideoClassificationConfig.AnnotationSpecSetConfig]): diff --git a/packages/google-cloud-datalabeling/noxfile.py b/packages/google-cloud-datalabeling/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-datalabeling/noxfile.py +++ b/packages/google-cloud-datalabeling/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json index 4a6f501c7672..fc67c458be18 100644 --- a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json +++ b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datalabeling", - "version": "0.1.0" + "version": "1.8.4" }, "snippets": [ { diff --git a/packages/google-cloud-datalabeling/scripts/decrypt-secrets.sh b/packages/google-cloud-datalabeling/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-datalabeling/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-datalabeling/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py index 65ceb449d7fb..4a2405f051cd 100644 --- a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py +++ b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -39,7 +39,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore diff --git a/packages/google-cloud-dataplex/CHANGELOG.md b/packages/google-cloud-dataplex/CHANGELOG.md index 1528c4fa47aa..5e8c831ba14e 100644 --- a/packages/google-cloud-dataplex/CHANGELOG.md +++ b/packages/google-cloud-dataplex/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.6.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v1.6.2...google-cloud-dataplex-v1.6.3) (2023-09-19) + + +### Documentation + +* Minor formatting ([9487380](https://github.com/googleapis/google-cloud-python/commit/94873808ece8059b07644a0a49dedf8e2906900a)) + +## [1.6.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v1.6.1...google-cloud-dataplex-v1.6.2) (2023-08-09) + + +### Bug Fixes + +* remove unused annotation in results_table ([#11548](https://github.com/googleapis/google-cloud-python/issues/11548)) ([840fc36](https://github.com/googleapis/google-cloud-python/commit/840fc369045a16000ba876eebbeb1e0b5d1ee1d8)) + ## [1.6.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v1.6.0...google-cloud-dataplex-v1.6.1) (2023-08-03) 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index d94d4dea4d20..fc429472f4c9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.6.3" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index d94d4dea4d20..fc429472f4c9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.6.1" # {x-release-please-version} +__version__ = "1.6.3" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py index 4205d9f40fe1..fe34d154e6c4 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -45,7 +45,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -704,8 +704,8 @@ async def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1078,6 +1078,7 @@ async def sample_list_content(): Returns: google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager: List content response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index 6c36b6146a1b..2d66621b6d86 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -49,7 +49,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -944,8 +944,8 @@ def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1295,6 +1295,7 @@ def sample_list_content(): Returns: google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager: List content response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py index b865a478092b..b79cc4e5d7c2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py @@ -25,7 +25,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py index d32ddec8d975..db2e9f742497 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -23,7 +23,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py index 8b0fc4f377ac..60c21e55f3a6 100644 --- 
a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py @@ -22,7 +22,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index 9756884ef5dc..963349ee405a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -87,8 +87,6 @@ class DataScanServiceAsyncClient: ) entity_path = staticmethod(DataScanServiceClient.entity_path) parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) - table_path = staticmethod(DataScanServiceClient.table_path) - parse_table_path = staticmethod(DataScanServiceClient.parse_table_path) common_billing_account_path = staticmethod( DataScanServiceClient.common_billing_account_path 
) @@ -840,6 +838,7 @@ async def sample_list_data_scans(): Returns: google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansAsyncPager: List dataScans response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1164,6 +1163,7 @@ async def sample_list_data_scan_jobs(): Returns: google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsAsyncPager: List DataScanJobs response. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index ee8d4f2f2281..1c1c7316b432 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -268,28 +268,6 @@ def parse_entity_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} - @staticmethod - def table_path( - project: str, - dataset: str, - table: str, - ) -> str: - """Returns a fully-qualified table string.""" - return "projects/{project}/datasets/{dataset}/tables/{table}".format( - project=project, - dataset=dataset, - table=table, - ) - - @staticmethod - def parse_table_path(path: str) -> Dict[str, str]: - """Parses a table path into its component segments.""" - m = re.match( - 
r"^projects/(?P.+?)/datasets/(?P.+?)/tables/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - @staticmethod def common_billing_account_path( billing_account: str, @@ -1131,6 +1109,7 @@ def sample_list_data_scans(): Returns: google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScansPager: List dataScans response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1455,6 +1434,7 @@ def sample_list_data_scan_jobs(): Returns: google.cloud.dataplex_v1.services.data_scan_service.pagers.ListDataScanJobsPager: List DataScanJobs response. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py index 2ed765d42d79..0fa443d877d4 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -701,6 +701,7 @@ async def sample_list_data_taxonomies(): Returns: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesAsyncPager: List DataTaxonomies response. + Iterating over this object will yield results and resolve additional pages automatically. 
@@ -1330,6 +1331,7 @@ async def sample_list_data_attribute_bindings(): Returns: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsAsyncPager: List DataAttributeBindings response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1943,6 +1945,7 @@ async def sample_list_data_attributes(): Returns: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesAsyncPager: List DataAttributes response. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 01edb8013a9d..656263988ccf 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -961,6 +961,7 @@ def sample_list_data_taxonomies(): Returns: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataTaxonomiesPager: List DataTaxonomies response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1596,6 +1597,7 @@ def sample_list_data_attribute_bindings(): Returns: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributeBindingsPager: List DataAttributeBindings response. 
+ Iterating over this object will yield results and resolve additional pages automatically. @@ -2213,6 +2215,7 @@ def sample_list_data_attributes(): Returns: google.cloud.dataplex_v1.services.data_taxonomy_service.pagers.ListDataAttributesPager: List DataAttributes response. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index 5f1624c54220..2245b8fe3bb4 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -715,6 +715,7 @@ async def sample_list_lakes(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesAsyncPager: List lakes response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -961,6 +962,7 @@ async def sample_list_lake_actions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsAsyncPager: List actions response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1492,6 +1494,7 @@ async def sample_list_zones(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesAsyncPager: List zones response. 
+ Iterating over this object will yield results and resolve additional pages automatically. @@ -1730,6 +1733,7 @@ async def sample_list_zone_actions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsAsyncPager: List actions response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -2251,6 +2255,7 @@ async def sample_list_assets(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsAsyncPager: List assets response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -2484,6 +2489,7 @@ async def sample_list_asset_actions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsAsyncPager: List actions response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3001,6 +3007,7 @@ async def sample_list_tasks(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksAsyncPager: List tasks response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3231,6 +3238,7 @@ async def sample_list_jobs(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsAsyncPager: List jobs response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4048,6 +4056,7 @@ async def sample_list_environments(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager: List environments response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4281,6 +4290,7 @@ async def sample_list_sessions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager: List sessions response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index abc4137ffc25..155a854108f7 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1106,6 +1106,7 @@ def sample_list_lakes(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakesPager: List lakes response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1334,6 +1335,7 @@ def sample_list_lake_actions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListLakeActionsPager: List actions response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1856,6 +1858,7 @@ def sample_list_zones(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZonesPager: List zones response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -2076,6 +2079,7 @@ def sample_list_zone_actions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListZoneActionsPager: List actions response. + Iterating over this object will yield results and resolve additional pages automatically. 
@@ -2588,6 +2592,7 @@ def sample_list_assets(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetsPager: List assets response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -2803,6 +2808,7 @@ def sample_list_asset_actions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListAssetActionsPager: List actions response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3311,6 +3317,7 @@ def sample_list_tasks(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListTasksPager: List tasks response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3523,6 +3530,7 @@ def sample_list_jobs(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListJobsPager: List jobs response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4322,6 +4330,7 @@ def sample_list_environments(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager: List environments response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -4537,6 +4546,7 @@ def sample_list_sessions(): Returns: google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager: List sessions response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py index 0ecd3486a7ea..e406eace3bea 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -45,7 +45,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.dataplex_v1.services.metadata_service import pagers @@ -700,6 +700,7 @@ async def sample_list_entities(): Returns: google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesAsyncPager: List metadata entities response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1141,6 +1142,7 @@ async def sample_list_partitions(): Returns: google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsAsyncPager: List metadata partitions response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index b5f5465370bd..95edb406dec9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -49,7 +49,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.dataplex_v1.services.metadata_service import pagers @@ -973,6 +973,7 @@ def sample_list_entities(): Returns: google.cloud.dataplex_v1.services.metadata_service.pagers.ListEntitiesPager: List metadata entities response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1396,6 +1397,7 @@ def sample_list_partitions(): Returns: google.cloud.dataplex_v1.services.metadata_service.pagers.ListPartitionsPager: List metadata partitions response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py index 50e32bec36c9..729be3b12ce7 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py @@ -25,7 +25,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py index 69f497934fce..616f55747bfe 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py @@ -23,7 +23,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py index d330933a440d..cb4efcb8719a 
100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py @@ -22,7 +22,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py index cd3d4020b0a0..60780448653f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py @@ -81,10 +81,9 @@ class BigQueryExport(proto.Message): Attributes: results_table (str): - Optional. The BigQuery table to export - DataProfileScan results to. Format: - - projects/{project}/datasets/{dataset}/tables/{table} + Optional. The BigQuery table to export DataProfileScan + results to. Format: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID """ results_table: str = proto.Field( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index 4711d282a42e..1afc0075077e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -76,10 +76,9 @@ class BigQueryExport(proto.Message): Attributes: results_table (str): - Optional. 
The BigQuery table to export - DataQualityScan results to. Format: - - projects/{project}/datasets/{dataset}/tables/{table} + Optional. The BigQuery table to export DataQualityScan + results to. Format: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID """ results_table: str = proto.Field( @@ -551,6 +550,7 @@ class ColumnStatistic(proto.Enum): class RowConditionExpectation(proto.Message): r"""Evaluates whether each row passes the specified condition. + The SQL expression needs to use BigQuery standard SQL syntax and should produce a boolean value per row as the result. @@ -568,6 +568,7 @@ class RowConditionExpectation(proto.Message): class TableConditionExpectation(proto.Message): r"""Evaluates whether the provided expression is true. + The SQL expression needs to use BigQuery standard SQL syntax and should produce a scalar boolean result. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py index a11679da465f..92a535033804 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py @@ -189,12 +189,12 @@ class DataAttribute(proto.Message): has an up-to-date value before proceeding. resource_access_spec (google.cloud.dataplex_v1.types.ResourceAccessSpec): Optional. Specified when applied to a - resource (eg: Cloud Storage bucket, - BigQuery dataset, BigQuery table). + resource (eg: Cloud Storage bucket, BigQuery + dataset, BigQuery table). data_access_spec (google.cloud.dataplex_v1.types.DataAccessSpec): Optional. Specified when applied to data - stored on the resource (eg: rows, - columns in BigQuery Tables). + stored on the resource (eg: rows, columns in + BigQuery Tables). 
""" name: str = proto.Field( @@ -254,8 +254,7 @@ class DataAttribute(proto.Message): class DataAttributeBinding(proto.Message): r"""DataAttributeBinding represents binding of attributes to - resources. Eg: Bind - 'CustomerInfo' entity with 'PII' attribute. + resources. Eg: Bind 'CustomerInfo' entity with 'PII' attribute. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index 792b14e7cd49..3ab3728cba62 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -495,6 +495,7 @@ class DataScan(proto.Message): Required. The data source for DataScan. execution_spec (google.cloud.dataplex_v1.types.DataScan.ExecutionSpec): Optional. DataScan execution settings. + If not specified, the fields in it will use their default values. 
execution_status (google.cloud.dataplex_v1.types.DataScan.ExecutionStatus): diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index 3118759d6c12..e975ad18d961 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "1.6.1" + "version": "1.6.3" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index dde80c15c96a..b878f4b5126e 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -34,7 +34,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index 9b731587ad76..cf3eaa5058bb 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -43,7 +43,7 @@ from google.iam.v1 import 
iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -3654,34 +3654,8 @@ def test_parse_entity_path(): assert expected == actual -def test_table_path(): - project = "squid" - dataset = "clam" - table = "whelk" - expected = "projects/{project}/datasets/{dataset}/tables/{table}".format( - project=project, - dataset=dataset, - table=table, - ) - actual = DataScanServiceClient.table_path(project, dataset, table) - assert expected == actual - - -def test_parse_table_path(): - expected = { - "project": "octopus", - "dataset": "oyster", - "table": "nudibranch", - } - path = DataScanServiceClient.table_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataScanServiceClient.parse_table_path(path) - assert expected == actual - - def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3691,7 +3665,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = DataScanServiceClient.common_billing_account_path(**expected) @@ -3701,7 +3675,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -3711,7 +3685,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = DataScanServiceClient.common_folder_path(**expected) @@ -3721,7 +3695,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -3731,7 +3705,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = DataScanServiceClient.common_organization_path(**expected) @@ -3741,7 +3715,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -3751,7 +3725,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = DataScanServiceClient.common_project_path(**expected) @@ -3761,8 +3735,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = 
"nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3773,8 +3747,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = DataScanServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index 21f5e43c48b7..22cc825e0515 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -43,7 +43,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index 394d5ffaf8e5..53dfbac0aa6e 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -43,7 +43,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from 
google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index 70c98e51a538..29dcf7ab96ed 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -34,7 +34,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import timestamp_pb2 # type: ignore import grpc diff --git a/packages/google-cloud-dataproc-metastore/.flake8 b/packages/google-cloud-dataproc-metastore/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-dataproc-metastore/.flake8 +++ b/packages/google-cloud-dataproc-metastore/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-dataproc-metastore/CHANGELOG.md b/packages/google-cloud-dataproc-metastore/CHANGELOG.md index a0f2965f16be..b7f935507427 100644 --- a/packages/google-cloud-dataproc-metastore/CHANGELOG.md +++ b/packages/google-cloud-dataproc-metastore/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-metastore-v1.12.0...google-cloud-dataproc-metastore-v1.12.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([#11632](https://github.com/googleapis/google-cloud-python/issues/11632)) ([dbee08f](https://github.com/googleapis/google-cloud-python/commit/dbee08f2df63e1906ba13b0d3060eec5a80c79e2)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-metastore-v1.11.0...google-cloud-dataproc-metastore-v1.12.0) (2023-07-05) diff --git a/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst b/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst index af03b8fbad3f..5ef2cfd86ee7 100644 --- a/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst +++ b/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-dataproc-metastore/noxfile.py ********** diff --git a/packages/google-cloud-dataproc-metastore/MANIFEST.in b/packages/google-cloud-dataproc-metastore/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-dataproc-metastore/MANIFEST.in +++ b/packages/google-cloud-dataproc-metastore/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-dataproc-metastore/README.rst b/packages/google-cloud-dataproc-metastore/README.rst index 728ca4735645..ad2c5cf882b1 100644 --- a/packages/google-cloud-dataproc-metastore/README.rst +++ b/packages/google-cloud-dataproc-metastore/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. 
_`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-dataproc-metastore + pip install google-cloud-dataproc-metastore Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-dataproc-metastore + py -m venv + .\\Scripts\activate + pip install google-cloud-dataproc-metastore Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-dataproc-metastore/docs/conf.py b/packages/google-cloud-dataproc-metastore/docs/conf.py index 6eadb3b50a0d..56bf5ae24b46 100644 --- a/packages/google-cloud-dataproc-metastore/docs/conf.py +++ b/packages/google-cloud-dataproc-metastore/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py index 33bcd6491f6f..ebae3c7200f2 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -550,6 +550,7 @@ async def sample_create_service(): service, which is used as the final component of the metastore service's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and @@ -1182,6 +1183,7 @@ async def sample_create_metadata_import(): Required. The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist @@ -1912,6 +1914,7 @@ async def sample_create_backup(): Required. The ID of the backup, which is used as the final component of the backup's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py index 75c64dcbb293..19f99030e368 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -858,6 +858,7 @@ def sample_create_service(): service, which is used as the final component of the metastore service's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and @@ -1490,6 +1491,7 @@ def sample_create_metadata_import(): Required. 
The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist @@ -2221,6 +2223,7 @@ def sample_create_backup(): Required. The ID of the backup, which is used as the final component of the backup's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py index 531765e681b1..750597d3ea3e 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py index 73c96ac7fd48..851f05eec8b0 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from 
google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -308,6 +308,7 @@ async def sample_list_federations(): Returns: google.cloud.metastore_v1.services.dataproc_metastore_federation.pagers.ListFederationsAsyncPager: Response message for ListFederations + Iterating over this object will yield results and resolve additional pages automatically. @@ -543,6 +544,7 @@ async def sample_create_federation(): federation, which is used as the final component of the metastore federation's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py index cd6379570109..2181b4258b58 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -534,6 +534,7 @@ def sample_list_federations(): Returns: 
google.cloud.metastore_v1.services.dataproc_metastore_federation.pagers.ListFederationsPager: Response message for ListFederations + Iterating over this object will yield results and resolve additional pages automatically. @@ -769,6 +770,7 @@ def sample_create_federation(): federation, which is used as the final component of the metastore federation's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py index 305652665513..c67aa95de485 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py index b29c71a90f96..63b5978d0319 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py @@ -1263,6 +1263,7 @@ class CreateServiceRequest(proto.Message): Required. The ID of the metastore service, which is used as the final component of the metastore service's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric @@ -1523,6 +1524,7 @@ class CreateMetadataImportRequest(proto.Message): Required. The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist of alpha-numeric ASCII diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore_federation.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore_federation.py index 9c5e5a6626f6..18d32f89f443 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore_federation.py @@ -225,8 +225,10 @@ class ListFederationsRequest(proto.Message): Optional. A page token, received from a previous ListFederationServices call. Provide this token to retrieve the subsequent page. + To retrieve the first page, supply an empty page token. + When paginating, other parameters provided to ListFederationServices must match the call that provided the page token. @@ -326,6 +328,7 @@ class CreateFederationRequest(proto.Message): Required. The ID of the metastore federation, which is used as the final component of the metastore federation's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py index 30d8e163ff1c..8d28f4ffcac1 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -552,6 +552,7 @@ async def sample_create_service(): service, which is used as the final component of the metastore service's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and @@ -1184,6 +1185,7 @@ async def sample_create_metadata_import(): Required. The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist @@ -1914,6 +1916,7 @@ async def sample_create_backup(): Required. The ID of the backup, which is used as the final component of the backup's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py index 0f226bc1d55d..4bc01c33efe2 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -880,6 +880,7 @@ def sample_create_service(): service, which is used as the final component of the metastore service's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and @@ -1512,6 +1513,7 @@ def sample_create_metadata_import(): Required. 
The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist @@ -2243,6 +2245,7 @@ def sample_create_backup(): Required. The ID of the backup, which is used as the final component of the backup's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py index 494f6487714e..4b23ad7b7a44 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py index a7102f7e77bb..8293b6f83fdc 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import 
locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -308,6 +308,7 @@ async def sample_list_federations(): Returns: google.cloud.metastore_v1alpha.services.dataproc_metastore_federation.pagers.ListFederationsAsyncPager: Response message for ListFederations + Iterating over this object will yield results and resolve additional pages automatically. @@ -543,6 +544,7 @@ async def sample_create_federation(): federation, which is used as the final component of the metastore federation's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py index 80406db5eb4a..f2fef4bc01db 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ 
-534,6 +534,7 @@ def sample_list_federations(): Returns: google.cloud.metastore_v1alpha.services.dataproc_metastore_federation.pagers.ListFederationsPager: Response message for ListFederations + Iterating over this object will yield results and resolve additional pages automatically. @@ -769,6 +770,7 @@ def sample_create_federation(): federation, which is used as the final component of the metastore federation's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py index b350414f5cda..70982f2cd15e 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py index 1fac7622d65f..5c24b733a5eb 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py @@ -1364,6 +1364,7 @@ class CreateServiceRequest(proto.Message): Required. 
The ID of the metastore service, which is used as the final component of the metastore service's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric @@ -1624,6 +1625,7 @@ class CreateMetadataImportRequest(proto.Message): Required. The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist of alpha-numeric ASCII diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore_federation.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore_federation.py index 02a5b7fe8529..74f2f73a7f73 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore_federation.py @@ -228,8 +228,10 @@ class ListFederationsRequest(proto.Message): Optional. A page token, received from a previous ListFederationServices call. Provide this token to retrieve the subsequent page. + To retrieve the first page, supply an empty page token. + When paginating, other parameters provided to ListFederationServices must match the call that provided the page token. @@ -329,6 +331,7 @@ class CreateFederationRequest(proto.Message): Required. The ID of the metastore federation, which is used as the final component of the metastore federation's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py index 360a0d13ebdd..ac3bc60c3fb3 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py index f6e521254cab..1c59caab3975 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -552,6 +552,7 @@ async def sample_create_service(): service, which is used as the final component of the metastore service's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and @@ -1184,6 +1185,7 @@ async def sample_create_metadata_import(): Required. The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist @@ -1914,6 +1916,7 @@ async def sample_create_backup(): Required. The ID of the backup, which is used as the final component of the backup's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py index f93ae1011886..3f75aa0bf478 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -880,6 +880,7 @@ def sample_create_service(): service, which is used as the final component of the metastore service's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and @@ -1512,6 +1513,7 @@ def sample_create_metadata_import(): Required. 
The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist @@ -2243,6 +2245,7 @@ def sample_create_backup(): Required. The ID of the backup, which is used as the final component of the backup's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py index 8710b246e8ed..882e50192ae2 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py index 441be3430ac3..1adcc9dcef6c 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import 
locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -308,6 +308,7 @@ async def sample_list_federations(): Returns: google.cloud.metastore_v1beta.services.dataproc_metastore_federation.pagers.ListFederationsAsyncPager: Response message for ListFederations + Iterating over this object will yield results and resolve additional pages automatically. @@ -543,6 +544,7 @@ async def sample_create_federation(): federation, which is used as the final component of the metastore federation's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py index 0f8f827a3819..92aceca4dfca 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -534,6 
+534,7 @@ def sample_list_federations(): Returns: google.cloud.metastore_v1beta.services.dataproc_metastore_federation.pagers.ListFederationsPager: Response message for ListFederations + Iterating over this object will yield results and resolve additional pages automatically. @@ -769,6 +770,7 @@ def sample_create_federation(): federation, which is used as the final component of the metastore federation's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py index 40229ced1351..feba135822f6 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py index ca92e623842e..23269a0f5359 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py @@ -1364,6 +1364,7 @@ class CreateServiceRequest(proto.Message): Required. 
The ID of the metastore service, which is used as the final component of the metastore service's name. + This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric @@ -1624,6 +1625,7 @@ class CreateMetadataImportRequest(proto.Message): Required. The ID of the metadata import, which is used as the final component of the metadata import's name. + This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist of alpha-numeric ASCII diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore_federation.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore_federation.py index c96e72ca341d..736c9ff3ccab 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore_federation.py @@ -228,8 +228,10 @@ class ListFederationsRequest(proto.Message): Optional. A page token, received from a previous ListFederationServices call. Provide this token to retrieve the subsequent page. + To retrieve the first page, supply an empty page token. + When paginating, other parameters provided to ListFederationServices must match the call that provided the page token. @@ -329,6 +331,7 @@ class CreateFederationRequest(proto.Message): Required. The ID of the metastore federation, which is used as the final component of the metastore federation's name. 
+ This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric diff --git a/packages/google-cloud-dataproc-metastore/noxfile.py b/packages/google-cloud-dataproc-metastore/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-dataproc-metastore/noxfile.py +++ b/packages/google-cloud-dataproc-metastore/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json index 61145e9be3f7..631c343ce1b8 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "0.1.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json index 96abac734738..6316721caf5a 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "0.1.0" + "version": 
"1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json index 4a03aa222378..b2696ee9c288 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "0.1.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/scripts/decrypt-secrets.sh b/packages/google-cloud-dataproc-metastore/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-dataproc-metastore/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-dataproc-metastore/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index 46854e2ee2ac..93a076e82a91 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py index d60837df08b0..16aa7c87d132 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py 
b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index 6ce87dadb122..ff14c6c986f5 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py index 9601b825980f..ce589cfbedf4 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index 
4c7c9a021e27..4370f8a558d6 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py index c465e7d6a103..a3b0eb5c24cf 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/google-cloud-datastream/.flake8 b/packages/google-cloud-datastream/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-datastream/.flake8 +++ b/packages/google-cloud-datastream/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed 
under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastream/CHANGELOG.md b/packages/google-cloud-datastream/CHANGELOG.md index ebee324b37a3..bef002ca5976 100644 --- a/packages/google-cloud-datastream/CHANGELOG.md +++ b/packages/google-cloud-datastream/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.7.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.7.0...google-cloud-datastream-v1.7.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([#11632](https://github.com/googleapis/google-cloud-python/issues/11632)) ([dbee08f](https://github.com/googleapis/google-cloud-python/commit/dbee08f2df63e1906ba13b0d3060eec5a80c79e2)) + ## [1.7.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.6.1...google-cloud-datastream-v1.7.0) (2023-07-24) diff --git a/packages/google-cloud-datastream/CONTRIBUTING.rst b/packages/google-cloud-datastream/CONTRIBUTING.rst index b958c8ef6bbf..785aa70f5fd8 100644 --- a/packages/google-cloud-datastream/CONTRIBUTING.rst +++ b/packages/google-cloud-datastream/CONTRIBUTING.rst @@ -236,7 +236,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/google-cloud-python/blob/main/noxfile.py +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-datastream/noxfile.py ********** diff --git a/packages/google-cloud-datastream/MANIFEST.in b/packages/google-cloud-datastream/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-datastream/MANIFEST.in +++ b/packages/google-cloud-datastream/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-datastream/README.rst b/packages/google-cloud-datastream/README.rst index 17b56d7649b4..2f4a347a0bdd 100644 --- a/packages/google-cloud-datastream/README.rst +++ b/packages/google-cloud-datastream/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-datastream + pip install google-cloud-datastream Windows @@ -88,10 +90,9 @@ Windows .. 
code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-datastream + py -m venv + .\\Scripts\activate + pip install google-cloud-datastream Next Steps ~~~~~~~~~~ diff --git a/packages/google-cloud-datastream/docs/conf.py b/packages/google-cloud-datastream/docs/conf.py index d8d9273bc7b2..37fac0cd235e 100644 --- a/packages/google-cloud-datastream/docs/conf.py +++ b/packages/google-cloud-datastream/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py index 181f5fd08a41..009753866165 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py index 181f5fd08a41..009753866165 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py index b342e880c51d..dff1135c944d 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1019,6 +1019,7 @@ async def sample_list_streams(): Returns: google.cloud.datastream_v1.services.datastream.pagers.ListStreamsAsyncPager: Response message for listing streams. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3010,6 +3011,7 @@ async def sample_list_routes(): Returns: google.cloud.datastream_v1.services.datastream.pagers.ListRoutesAsyncPager: Route list response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py index 1ae86b8244fc..c9d4b492f9f3 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1358,6 +1358,7 @@ def sample_list_streams(): Returns: google.cloud.datastream_v1.services.datastream.pagers.ListStreamsPager: Response message for listing streams. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3354,6 +3355,7 @@ def sample_list_routes(): Returns: google.cloud.datastream_v1.services.datastream.pagers.ListRoutesPager: Route list response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py index a52847aca374..e203205811b0 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream.py index 878b7b7eb09a..393f4c92e33f 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream.py @@ -379,6 +379,7 @@ class CreateConnectionProfileRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -387,6 +388,7 @@ class CreateConnectionProfileRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -446,6 +448,7 @@ class UpdateConnectionProfileRequest(proto.Message): ignore the request if it has already been completed. 
The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -454,6 +457,7 @@ class UpdateConnectionProfileRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -504,6 +508,7 @@ class DeleteConnectionProfileRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -512,6 +517,7 @@ class DeleteConnectionProfileRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -640,6 +646,7 @@ class CreateStreamRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -648,6 +655,7 @@ class CreateStreamRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -706,6 +714,7 @@ class UpdateStreamRequest(proto.Message): ignore the request if it has already been completed. 
The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -714,6 +723,7 @@ class UpdateStreamRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -764,6 +774,7 @@ class DeleteStreamRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -772,6 +783,7 @@ class DeleteStreamRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1045,6 +1057,7 @@ class CreatePrivateConnectionRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1053,6 +1066,7 @@ class CreatePrivateConnectionRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1183,6 +1197,7 @@ class DeletePrivateConnectionRequest(proto.Message): ignore the request if it has already been completed. 
The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1191,6 +1206,7 @@ class DeletePrivateConnectionRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1247,6 +1263,7 @@ class CreateRouteRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1255,6 +1272,7 @@ class CreateRouteRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1374,6 +1392,7 @@ class DeleteRouteRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1382,6 +1401,7 @@ class DeleteRouteRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py index 877fb943db16..b31ef44f201b 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py @@ -1028,7 +1028,6 @@ class MysqlColumn(proto.Message): data_type (str): The MySQL data type. Full data types list can be found here: - https://dev.mysql.com/doc/refman/8.0/en/data-types.html length (int): Column length. @@ -1578,6 +1577,7 @@ class State(proto.Enum): The stream is paused. MAINTENANCE (4): The stream is in maintenance mode. + Updates are rejected on the resource in this state. FAILED (5): diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py index 181f5fd08a41..009753866165 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.7.1" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py index 998fc95bf386..e2ea55a115ad 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/async_client.py @@ -2558,6 +2558,7 @@ async def sample_list_routes(): Returns: google.cloud.datastream_v1alpha1.services.datastream.pagers.ListRoutesAsyncPager: route list response + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py index df0c90425268..817dc1d74d73 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py @@ -2863,6 +2863,7 @@ def sample_list_routes(): Returns: google.cloud.datastream_v1alpha1.services.datastream.pagers.ListRoutesPager: route list response + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream.py index e0a75b302a8d..eab91925066c 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream.py @@ -380,6 +380,7 @@ class CreateConnectionProfileRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -388,6 +389,7 @@ class CreateConnectionProfileRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -432,6 +434,7 @@ class UpdateConnectionProfileRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -440,6 +443,7 @@ class UpdateConnectionProfileRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -475,6 +479,7 @@ class DeleteConnectionProfileRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. 
+ For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -483,6 +488,7 @@ class DeleteConnectionProfileRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -611,6 +617,7 @@ class CreateStreamRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -619,6 +626,7 @@ class CreateStreamRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -677,6 +685,7 @@ class UpdateStreamRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -685,6 +694,7 @@ class UpdateStreamRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -735,6 +745,7 @@ class DeleteStreamRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. 
+ For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -743,6 +754,7 @@ class DeleteStreamRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -848,6 +860,7 @@ class CreatePrivateConnectionRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -856,6 +869,7 @@ class CreatePrivateConnectionRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -978,6 +992,7 @@ class DeletePrivateConnectionRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -986,6 +1001,7 @@ class DeletePrivateConnectionRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1042,6 +1058,7 @@ class CreateRouteRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. 
+ For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1050,6 +1067,7 @@ class CreateRouteRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1169,6 +1187,7 @@ class DeleteRouteRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1177,6 +1196,7 @@ class DeleteRouteRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream_resources.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream_resources.py index fff92a454b4d..1ac82736e1b5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream_resources.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/types/datastream_resources.py @@ -766,7 +766,6 @@ class MysqlColumn(proto.Message): data_type (str): The MySQL data type. Full data types list can be found here: - https://dev.mysql.com/doc/refman/8.0/en/data-types.html length (int): Column length. @@ -1127,6 +1126,7 @@ class State(proto.Enum): The stream is paused. MAINTENANCE (4): The stream is in maintenance mode. + Updates are rejected on the resource in this state. 
FAILED (5): diff --git a/packages/google-cloud-datastream/noxfile.py b/packages/google-cloud-datastream/noxfile.py index 6f5debd52f23..9a2acd8b6787 100644 --- a/packages/google-cloud-datastream/noxfile.py +++ b/packages/google-cloud-datastream/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json index 6ab2078cbf7e..c1409b0e97e9 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.7.0" + "version": "1.7.1" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json index afdcf7828409..e828e6a4ffcc 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.7.0" + "version": "1.7.1" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/scripts/decrypt-secrets.sh b/packages/google-cloud-datastream/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- 
a/packages/google-cloud-datastream/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-datastream/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index 3f4554734ba6..3d6e40dbf9c3 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index 400fd46318fc..95153ce0a97d 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -41,7 +41,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # 
type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-deploy/CHANGELOG.md b/packages/google-cloud-deploy/CHANGELOG.md index 540ca06d1ba6..9ea33be939d2 100644 --- a/packages/google-cloud-deploy/CHANGELOG.md +++ b/packages/google-cloud-deploy/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v1.12.0...google-cloud-deploy-v1.12.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([#11632](https://github.com/googleapis/google-cloud-python/issues/11632)) ([dbee08f](https://github.com/googleapis/google-cloud-python/commit/dbee08f2df63e1906ba13b0d3060eec5a80c79e2)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v1.11.1...google-cloud-deploy-v1.12.0) (2023-08-03) diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index 18a7732658ee..ac3bc60c3fb3 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index 18a7732658ee..ac3bc60c3fb3 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.12.1" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py index 17a264df09d8..d9faf23eade3 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/async_client.py @@ -47,7 +47,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index 76b272960f9d..66ca58325c7c 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -51,7 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py 
b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index 3bf8ec5683bd..051faabe864e 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.protobuf import json_format import grpc # type: ignore from requests import __version__ as requests_version diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 23c441d44dda..7606c1890dad 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -919,8 +919,7 @@ class ListDeliveryPipelinesRequest(proto.Message): call that provided the page token. filter (str): Filter pipelines to be returned. See - https://google.aip.dev/160 for more - details. + https://google.aip.dev/160 for more details. order_by (str): Field to sort by. See https://google.aip.dev/132#ordering for more @@ -1016,6 +1015,7 @@ class CreateDeliveryPipelineRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1024,6 +1024,7 @@ class CreateDeliveryPipelineRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
+ The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1076,6 +1077,7 @@ class UpdateDeliveryPipelineRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1084,6 +1086,7 @@ class UpdateDeliveryPipelineRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1136,6 +1139,7 @@ class DeleteDeliveryPipelineRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1144,6 +1148,7 @@ class DeleteDeliveryPipelineRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1633,12 +1638,11 @@ class ListTargetsRequest(proto.Message): call that provided the page token. filter (str): Optional. Filter targets to be returned. See - https://google.aip.dev/160 for - more details. + https://google.aip.dev/160 for more details. order_by (str): Optional. Field to sort by. See - https://google.aip.dev/132#ordering for - more details. + https://google.aip.dev/132#ordering for more + details. 
""" parent: str = proto.Field( @@ -1730,6 +1734,7 @@ class CreateTargetRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1738,6 +1743,7 @@ class CreateTargetRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1790,6 +1796,7 @@ class UpdateTargetRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1798,6 +1805,7 @@ class UpdateTargetRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -1849,6 +1857,7 @@ class DeleteTargetRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -1857,6 +1866,7 @@ class DeleteTargetRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). 
@@ -2467,12 +2477,11 @@ class ListReleasesRequest(proto.Message): call that provided the page token. filter (str): Optional. Filter releases to be returned. See - https://google.aip.dev/160 - for more details. + https://google.aip.dev/160 for more details. order_by (str): Optional. Field to sort by. See - https://google.aip.dev/132#ordering for - more details. + https://google.aip.dev/132#ordering for more + details. """ parent: str = proto.Field( @@ -2564,6 +2573,7 @@ class CreateReleaseRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request @@ -2572,6 +2582,7 @@ class CreateReleaseRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -3298,12 +3309,11 @@ class ListRolloutsRequest(proto.Message): call that provided the page token. filter (str): Optional. Filter rollouts to be returned. See - https://google.aip.dev/160 - for more details. + https://google.aip.dev/160 for more details. order_by (str): Optional. Field to sort by. See - https://google.aip.dev/132#ordering for - more details. + https://google.aip.dev/132#ordering for more + details. """ parent: str = proto.Field( @@ -3397,6 +3407,7 @@ class CreateRolloutRequest(proto.Message): ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. + For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request @@ -3405,6 +3416,7 @@ class CreateRolloutRequest(proto.Message): so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. + The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). @@ -4188,12 +4200,11 @@ class ListJobRunsRequest(proto.Message): call that provided the page token. filter (str): Optional. Filter results to be returned. See - https://google.aip.dev/160 for - more details. + https://google.aip.dev/160 for more details. order_by (str): Optional. Field to sort by. See - https://google.aip.dev/132#ordering for - more details. + https://google.aip.dev/132#ordering for more + details. """ parent: str = proto.Field( diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index cd3f7b72c527..18749d920cb6 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "1.12.0" + "version": "1.12.1" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index e8d24fc79546..2cec23f15440 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -45,7 +45,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import 
operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/google-cloud-dialogflow-cx/.OwlBot.yaml b/packages/google-cloud-dialogflow-cx/.OwlBot.yaml new file mode 100644 index 000000000000..ff348289b10a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/dialogflow/cx/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-dialogflow-cx/$1 + +begin-after-commit-hash: be977610daf8d4ae6259ed2baf5e5e36c4d32f0b + diff --git a/packages/google-cloud-dialogflow-cx/.coveragerc b/packages/google-cloud-dialogflow-cx/.coveragerc new file mode 100644 index 000000000000..8dcabc09ff97 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dialogflowcx/__init__.py + google/cloud/dialogflowcx/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-dialogflow-cx/.flake8 b/packages/google-cloud-dialogflow-cx/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-dialogflow-cx/.gitignore b/packages/google-cloud-dialogflow-cx/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-dialogflow-cx/.repo-metadata.json b/packages/google-cloud-dialogflow-cx/.repo-metadata.json new file mode 100644 index 000000000000..6cb5e513eaba --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/.repo-metadata.json @@ -0,0 +1,16 @@ +{ + "name": "dialogflow-cx", + "name_pretty": "Dialogflow CX", + "product_documentation": "https://cloud.google.com/dialogflow/cx/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/dialogflow-cx/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5300385", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-dialogflow-cx", + "api_id": "dialogflow.googleapis.com", + "default_version": "v3", + "codeowner_team": "@googleapis/cdpe-cloudai", + "api_shortname": "dialogflow" +} diff --git a/packages/google-cloud-dialogflow-cx/CHANGELOG.md 
b/packages/google-cloud-dialogflow-cx/CHANGELOG.md new file mode 100644 index 000000000000..8375d6b13f50 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/CHANGELOG.md @@ -0,0 +1,652 @@ +# Changelog + +## [1.27.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.27.0...google-cloud-dialogflow-cx-v1.27.1) (2023-09-19) + + +### Documentation + +* Minor formatting ([#11632](https://github.com/googleapis/google-cloud-python/issues/11632)) ([dbee08f](https://github.com/googleapis/google-cloud-python/commit/dbee08f2df63e1906ba13b0d3060eec5a80c79e2)) + +## [1.27.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.26.0...google-cloud-dialogflow-cx-v1.27.0) (2023-08-31) + + +### Features + +* added generative AI, safety/security and speech endpointing settings ([#11614](https://github.com/googleapis/google-cloud-python/issues/11614)) ([85fb0fe](https://github.com/googleapis/google-cloud-python/commit/85fb0fe9ba4215e9f0daea49270af9b1d1664c7a)) +* added generative AI, safety/security and speech endpointing settings ([#11621](https://github.com/googleapis/google-cloud-python/issues/11621)) ([f0a5399](https://github.com/googleapis/google-cloud-python/commit/f0a539993f9baac2b32fc13d2297ee9deb623186)) + +## [1.26.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.25.0...v1.26.0) (2023-08-10) + + +### Features + +* Added agent level route group ([30e8c2b](https://github.com/googleapis/python-dialogflow-cx/commit/30e8c2b6d687e35168801660be70caa52131ff17)) +* Added flow import strategy ([30e8c2b](https://github.com/googleapis/python-dialogflow-cx/commit/30e8c2b6d687e35168801660be70caa52131ff17)) + +## [1.25.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.24.0...v1.25.0) (2023-07-11) + + +### Features + +* **v3:** Added GitIntegrationSettings to the Agent ([#535](https://github.com/googleapis/python-dialogflow-cx/issues/535)) 
([32b4a66](https://github.com/googleapis/python-dialogflow-cx/commit/32b4a668d5acd07b5f6f431e816fcfac9bb3053b)) + +## [1.24.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.23.1...v1.24.0) (2023-07-10) + + +### Features + +* **v3beta1:** Added GitIntegrationSettings to the Agent ([#531](https://github.com/googleapis/python-dialogflow-cx/issues/531)) ([5938750](https://github.com/googleapis/python-dialogflow-cx/commit/59387500bead38b100815003a3cf14d90de36789)) + +## [1.23.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.23.0...v1.23.1) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#529](https://github.com/googleapis/python-dialogflow-cx/issues/529)) ([e5e5ceb](https://github.com/googleapis/python-dialogflow-cx/commit/e5e5ceb35fd307b7395124dee961df545567fc11)) + +## [1.23.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.22.0...v1.23.0) (2023-06-19) + + +### Features + +* Added include_bigquery_export_settings to ExportAgentRequest ([9878beb](https://github.com/googleapis/python-dialogflow-cx/commit/9878beb7644c52032a906e89d7de1524d71b5197)) +* Added session_ttl to SessionProto ([9878beb](https://github.com/googleapis/python-dialogflow-cx/commit/9878beb7644c52032a906e89d7de1524d71b5197)) + +## [1.22.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.21.0...v1.22.0) (2023-06-13) + + +### Features + +* Add support for flexible webhook ([ba8d7d3](https://github.com/googleapis/python-dialogflow-cx/commit/ba8d7d341063fcd2ed1591ea84efb395340bc960)) + + +### Documentation + +* Update synthesize speech configs's documentation ([ba8d7d3](https://github.com/googleapis/python-dialogflow-cx/commit/ba8d7d341063fcd2ed1591ea84efb395340bc960)) + +## [1.21.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.20.1...v1.21.0) (2023-05-25) + + +### Features + +* Added debug info to StreamingDetectIntent 
([6da4342](https://github.com/googleapis/python-dialogflow-cx/commit/6da4342f5f835f1dd36570d7243852ba9d8afde5)) +* Added dtmf digits to WebhookRequest ([6da4342](https://github.com/googleapis/python-dialogflow-cx/commit/6da4342f5f835f1dd36570d7243852ba9d8afde5)) +* Added FLOW as a new DiffType in TestRunDifference ([6da4342](https://github.com/googleapis/python-dialogflow-cx/commit/6da4342f5f835f1dd36570d7243852ba9d8afde5)) +* Extended CreateAgent timeout to 180 seconds ([6da4342](https://github.com/googleapis/python-dialogflow-cx/commit/6da4342f5f835f1dd36570d7243852ba9d8afde5)) + +## [1.20.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.20.0...v1.20.1) (2023-03-23) + + +### Documentation + +* Add clarification in AudioInput ([#511](https://github.com/googleapis/python-dialogflow-cx/issues/511)) ([fc0ecdf](https://github.com/googleapis/python-dialogflow-cx/commit/fc0ecdf2f3d29f23de45e6bd44f1c1ec17ddb4ef)) +* Clarified wording around quota usage ([#515](https://github.com/googleapis/python-dialogflow-cx/issues/515)) ([361a359](https://github.com/googleapis/python-dialogflow-cx/commit/361a359e60c8953453a6837e3e5f1916fad76095)) +* Minor formatting changes ([#517](https://github.com/googleapis/python-dialogflow-cx/issues/517)) ([b05b15f](https://github.com/googleapis/python-dialogflow-cx/commit/b05b15f2ac54ffd83a33ee0829cab395f157a42f)) + +## [1.20.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.19.0...v1.20.0) (2023-03-01) + + +### Features + +* Added persist_parameter_changes field from `query_params` to MatchIntentRequest ([#506](https://github.com/googleapis/python-dialogflow-cx/issues/506)) ([27f6020](https://github.com/googleapis/python-dialogflow-cx/commit/27f60205a3068df1a1576f120c97d45bb3501b2a)) + +## [1.19.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.18.0...v1.19.0) (2023-02-28) + + +### Features + +* Added gcs.proto. 
added support for GcsDestination and TextToSpeechSettings ([acfd1a1](https://github.com/googleapis/python-dialogflow-cx/commit/acfd1a11aca9c9cba7fb351f1781fa73c1e1d985)) +* Added persist_parameter_changes field from query_params to MatchIntentRequest ([acfd1a1](https://github.com/googleapis/python-dialogflow-cx/commit/acfd1a11aca9c9cba7fb351f1781fa73c1e1d985)) +* Enable "rest" transport in Python for services supporting numeric enums ([acfd1a1](https://github.com/googleapis/python-dialogflow-cx/commit/acfd1a11aca9c9cba7fb351f1781fa73c1e1d985)) +* Remove [REQUIRED] for VersionConfig ([acfd1a1](https://github.com/googleapis/python-dialogflow-cx/commit/acfd1a11aca9c9cba7fb351f1781fa73c1e1d985)) + + +### Documentation + +* Add more meaningful comments ([acfd1a1](https://github.com/googleapis/python-dialogflow-cx/commit/acfd1a11aca9c9cba7fb351f1781fa73c1e1d985)) + +## [1.18.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.17.1...v1.18.0) (2023-01-30) + + +### Features + +* Added JSON_PACKAGE field to ExportAgentRequest ([#497](https://github.com/googleapis/python-dialogflow-cx/issues/497)) ([efe6286](https://github.com/googleapis/python-dialogflow-cx/commit/efe628674806271f5584b10eca899745fa190258)) +* **v3beta1:** Added JSON_PACKAGE field to ExportAgentRequest ([#495](https://github.com/googleapis/python-dialogflow-cx/issues/495)) ([d03ec56](https://github.com/googleapis/python-dialogflow-cx/commit/d03ec56bf21d0ce601c5a5029bcf4219b9372254)) + +## [1.17.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.17.0...v1.17.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([dc4f8e2](https://github.com/googleapis/python-dialogflow-cx/commit/dc4f8e26bd7f38ec3cafe4d358d8852ca8ea8a84)) + + +### Documentation + +* Add documentation for enums ([dc4f8e2](https://github.com/googleapis/python-dialogflow-cx/commit/dc4f8e26bd7f38ec3cafe4d358d8852ca8ea8a84)) + +## 
[1.17.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.16.0...v1.17.0) (2023-01-14) + + +### Features + +* **v3:** ResponseMessage proto contains channel information ([#488](https://github.com/googleapis/python-dialogflow-cx/issues/488)) ([a685e8c](https://github.com/googleapis/python-dialogflow-cx/commit/a685e8c8e2ed20325a0adddfdb6691030bd923b9)) + +## [1.16.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.15.0...v1.16.0) (2023-01-12) + + +### Features + +* **v3beta1:** ResponseMessage proto contains channel information ([#485](https://github.com/googleapis/python-dialogflow-cx/issues/485)) ([242abc6](https://github.com/googleapis/python-dialogflow-cx/commit/242abc60b7402dc1937105b34c88cf1b75d704da)) + +## [1.15.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.14.1...v1.15.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#482](https://github.com/googleapis/python-dialogflow-cx/issues/482)) ([1b846d2](https://github.com/googleapis/python-dialogflow-cx/commit/1b846d292e7ec8deb555d489c8adde0d6817f47f)) + +## [1.14.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.14.0...v1.14.1) (2023-01-06) + + +### Documentation + +* **samples:** Update previous month logic to avoid zero-index bug in webhook_prebuilt_telecom sample ([#479](https://github.com/googleapis/python-dialogflow-cx/issues/479)) ([38c188a](https://github.com/googleapis/python-dialogflow-cx/commit/38c188ac7ec89471fdaff3981e8e66c820ff683c)) + +## [1.14.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.13.5...v1.14.0) (2022-12-15) + + +### Features + +* Add files field to finding's list of attributes ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* Add support for `google.cloud.dialogflowcx.__version__` ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* Add typing to proto.Message based 
class attributes ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) + + +### Bug Fixes + +* Add dict typing for client_options ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* Drop usage of pkg_resources ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* Fix timeout default values ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) + + +### Documentation + +* Clarified Agent Assist max retention is 30 days ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* Clarify interactive logging TTL behavior ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* **samples:** Snippetgen handling of repeated enum field ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([ff576ee](https://github.com/googleapis/python-dialogflow-cx/commit/ff576eefe4a202327fd3de6e98da1cca24ddad4e)) + +## [1.13.5](https://github.com/googleapis/python-dialogflow-cx/compare/v1.13.4...v1.13.5) (2022-10-26) + + +### Documentation + +* Clarified TTL as time-to-live ([#465](https://github.com/googleapis/python-dialogflow-cx/issues/465)) ([5e8fc34](https://github.com/googleapis/python-dialogflow-cx/commit/5e8fc34f7a40351c2a3092fb835bf8ac0f3e63d1)) + +## [1.13.4](https://github.com/googleapis/python-dialogflow-cx/compare/v1.13.3...v1.13.4) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 
([#462](https://github.com/googleapis/python-dialogflow-cx/issues/462)) ([8cd6d6d](https://github.com/googleapis/python-dialogflow-cx/commit/8cd6d6df6c19b93e8116e968fb63292153a019b7)) + +## [1.13.3](https://github.com/googleapis/python-dialogflow-cx/compare/v1.13.2...v1.13.3) (2022-10-04) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#458](https://github.com/googleapis/python-dialogflow-cx/issues/458)) ([07e19d4](https://github.com/googleapis/python-dialogflow-cx/commit/07e19d476e3382ead73b44f4bc8b84e230aa6115)) + + +### Documentation + +* Clarified gcs_bucket field of the SecuritySettings message ([#460](https://github.com/googleapis/python-dialogflow-cx/issues/460)) ([f922211](https://github.com/googleapis/python-dialogflow-cx/commit/f922211c6dde44572303926f9456d040e041a224)) +* **samples:** Adding snippet to extract SessionInfo ([d6ef048](https://github.com/googleapis/python-dialogflow-cx/commit/d6ef04841975e9076c940802283d1a9ba8e05eb4)) + +## [1.13.2](https://github.com/googleapis/python-dialogflow-cx/compare/v1.13.1...v1.13.2) (2022-09-02) + + +### Documentation + +* **samples:** Add prebuilt telecom agent webhook code in python ([#434](https://github.com/googleapis/python-dialogflow-cx/issues/434)) ([45926d2](https://github.com/googleapis/python-dialogflow-cx/commit/45926d2fd7bcfb49785ad339e7141349e31f91b4)) + +## [1.13.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.13.0...v1.13.1) (2022-08-15) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#435](https://github.com/googleapis/python-dialogflow-cx/issues/435)) ([519012b](https://github.com/googleapis/python-dialogflow-cx/commit/519012bf13179d4fdbe6856ca21fd539b4f776b1)) +* **deps:** require proto-plus >= 1.22.0 ([519012b](https://github.com/googleapis/python-dialogflow-cx/commit/519012bf13179d4fdbe6856ca21fd539b4f776b1)) + +## [1.13.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.12.1...v1.13.0) (2022-07-15) + + +### Features + +* add audience 
parameter ([3a356c9](https://github.com/googleapis/python-dialogflow-cx/commit/3a356c951d64f99ca9fb0eb426921f42f5f41abf)) +* **v3:** added webhook_config ([3a356c9](https://github.com/googleapis/python-dialogflow-cx/commit/3a356c951d64f99ca9fb0eb426921f42f5f41abf)) +* **v3beta1:** added webhook_config ([3a356c9](https://github.com/googleapis/python-dialogflow-cx/commit/3a356c951d64f99ca9fb0eb426921f42f5f41abf)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([3a356c9](https://github.com/googleapis/python-dialogflow-cx/commit/3a356c951d64f99ca9fb0eb426921f42f5f41abf)) +* require python 3.7+ ([4746c9f](https://github.com/googleapis/python-dialogflow-cx/commit/4746c9f1c34e54892c64857751b84b204b6a8871)) + + +### Documentation + +* add detect intent with intent input snippet ([#417](https://github.com/googleapis/python-dialogflow-cx/issues/417)) ([04ded3c](https://github.com/googleapis/python-dialogflow-cx/commit/04ded3ce2cdee30bdf0dd91f7164d32fcda204bd)) +* add detect intent with sentiment analysis snippet ([#416](https://github.com/googleapis/python-dialogflow-cx/issues/416)) ([dcd8319](https://github.com/googleapis/python-dialogflow-cx/commit/dcd8319e2edef2e99dffea50b202f9b7f5a52c06)) +* add detect intent with text-to-speech synthesized output snippet ([#419](https://github.com/googleapis/python-dialogflow-cx/issues/419)) ([ac6aae4](https://github.com/googleapis/python-dialogflow-cx/commit/ac6aae4fa4607bf17aa9934ccba440d976b25f17)) +* Add dialogflow cx detect intent with disabled webhook snippet ([#422](https://github.com/googleapis/python-dialogflow-cx/issues/422)) ([406e84d](https://github.com/googleapis/python-dialogflow-cx/commit/406e84dabb889c8f43d4084c6e4ed39fbc6d4ee0)) +* add streaming detect intent with partial response sample ([#414](https://github.com/googleapis/python-dialogflow-cx/issues/414)) ([57a0e16](https://github.com/googleapis/python-dialogflow-cx/commit/57a0e16995bf2520306c8d29ee84088345a9b3be)) +* clarify descriptions 
of the AdvancedSettings and WebhookRequest data types ([3a356c9](https://github.com/googleapis/python-dialogflow-cx/commit/3a356c951d64f99ca9fb0eb426921f42f5f41abf)) +* Dialogflow cx v3 detect intent event input snippet ([#421](https://github.com/googleapis/python-dialogflow-cx/issues/421)) ([0524558](https://github.com/googleapis/python-dialogflow-cx/commit/052455829c99ff0838a5a140a0496bcff4a45178)) +* improve comments for protos ([3a356c9](https://github.com/googleapis/python-dialogflow-cx/commit/3a356c951d64f99ca9fb0eb426921f42f5f41abf)) +* Update region_tag: dialogflow_detect_intent_text --> dialogflow_cx_detect_intent_text ([#424](https://github.com/googleapis/python-dialogflow-cx/issues/424)) ([dd2257c](https://github.com/googleapis/python-dialogflow-cx/commit/dd2257c850059e1cb63c11326dd760665079c32b)) + +## [1.12.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.12.0...v1.12.1) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#396](https://github.com/googleapis/python-dialogflow-cx/issues/396)) ([51be5ca](https://github.com/googleapis/python-dialogflow-cx/commit/51be5ca187e68f17af53e6a41e59bb4cc04086c1)) + + +### Documentation + +* fix changelog header to consistent size ([#395](https://github.com/googleapis/python-dialogflow-cx/issues/395)) ([f21bc73](https://github.com/googleapis/python-dialogflow-cx/commit/f21bc7349da7fabe50cc5fb846db43e15da8b71d)) + +## [1.12.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.11.0...v1.12.0) (2022-05-09) + + +### Features + +* **v3beta1:** added audio_export_settings ([#311](https://github.com/googleapis/python-dialogflow-cx/issues/311)) ([228ae83](https://github.com/googleapis/python-dialogflow-cx/commit/228ae8310b71412c1636a5ef214f62dad6473e40)) + + +### Documentation + +* **v3:** update the doc on diagnostic info ([#314](https://github.com/googleapis/python-dialogflow-cx/issues/314)) 
([6109b64](https://github.com/googleapis/python-dialogflow-cx/commit/6109b64efb85480fdd9793476db14e65c40a0333)) + +## [1.11.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.10.0...v1.11.0) (2022-05-03) + + +### Features + +* added data format specification for export agent ([1844193](https://github.com/googleapis/python-dialogflow-cx/commit/1844193788ce3d9f66f2822846b31f469f34f530)) +* **v3:** add support for locking an agent for changes ([#276](https://github.com/googleapis/python-dialogflow-cx/issues/276)) ([1844193](https://github.com/googleapis/python-dialogflow-cx/commit/1844193788ce3d9f66f2822846b31f469f34f530)) +* **v3:** added audio_export_settings ([#300](https://github.com/googleapis/python-dialogflow-cx/issues/300)) ([b225dfe](https://github.com/googleapis/python-dialogflow-cx/commit/b225dfe9371e89ee54e483888b7ed36d8374e011)) +* **v3beta1:** added data format specification for export agent ([72e624a](https://github.com/googleapis/python-dialogflow-cx/commit/72e624ae6430fd454380cdf6e348dbe763e56e6f)) +* **v3beta1:** added support for locking an agent for changes ([#281](https://github.com/googleapis/python-dialogflow-cx/issues/281)) ([72e624a](https://github.com/googleapis/python-dialogflow-cx/commit/72e624ae6430fd454380cdf6e348dbe763e56e6f)) + + +### Documentation + +* improved docs format ([#275](https://github.com/googleapis/python-dialogflow-cx/issues/275)) ([560e6a4](https://github.com/googleapis/python-dialogflow-cx/commit/560e6a478a7daaf1ac9fac23ca01390a6e7699a7)) +* minor wording update ([#294](https://github.com/googleapis/python-dialogflow-cx/issues/294)) ([f660888](https://github.com/googleapis/python-dialogflow-cx/commit/f660888d55f5aab2b40d5e7f0b214e3e8ba01864)) +* **samples:** Adds snippet for configuring a webhook to enable an agent response. 
([#306](https://github.com/googleapis/python-dialogflow-cx/issues/306)) ([c0cc924](https://github.com/googleapis/python-dialogflow-cx/commit/c0cc924a257740e68d244c265c6406a8d6888cc5)) +* **samples:** Adds snippet for configuring optional or required form parameters ([#305](https://github.com/googleapis/python-dialogflow-cx/issues/305)) ([720c0bd](https://github.com/googleapis/python-dialogflow-cx/commit/720c0bdd0f5707a2c71c9d32a91f06f86c48a98e)) +* **samples:** Adds snippet for validating a form parameter. ([#302](https://github.com/googleapis/python-dialogflow-cx/issues/302)) ([8cfe6a1](https://github.com/googleapis/python-dialogflow-cx/commit/8cfe6a15b18b558faa2cd5ceb8dc7b291add4444)) +* **samples:** Configure session parameters snippet ([#303](https://github.com/googleapis/python-dialogflow-cx/issues/303)) ([ace3936](https://github.com/googleapis/python-dialogflow-cx/commit/ace393696fdec8040bd5b23daa09f80183ad9125)) +* **samples:** Configure session parameters trigger transition ([#304](https://github.com/googleapis/python-dialogflow-cx/issues/304)) ([d6cab9d](https://github.com/googleapis/python-dialogflow-cx/commit/d6cab9dd6db83aea15548795a0fc449f72a2b56f)) +* **v3beta1:** added explanation for uri fields in resources ([#273](https://github.com/googleapis/python-dialogflow-cx/issues/273)) ([01b0f8f](https://github.com/googleapis/python-dialogflow-cx/commit/01b0f8f795d06609b370ba7568db753152764d9b)) +* **v3beta1:** improved docs format ([01b0f8f](https://github.com/googleapis/python-dialogflow-cx/commit/01b0f8f795d06609b370ba7568db753152764d9b)) + +## [1.10.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.9.1...v1.10.0) (2022-03-10) + + +### Features + +* added page in TestConfig ([#268](https://github.com/googleapis/python-dialogflow-cx/issues/268)) ([61fd2dc](https://github.com/googleapis/python-dialogflow-cx/commit/61fd2dc3512e4bcfdfc51cf085cc12cbbf1e043b)) +* **v3beta1:** added page in TestConfig 
([#270](https://github.com/googleapis/python-dialogflow-cx/issues/270)) ([a81f0c2](https://github.com/googleapis/python-dialogflow-cx/commit/a81f0c23d5dc835bd6abdccd546ef00f6c421a74)) + + +### Documentation + +* clarified wording around Cloud Storage usage ([61fd2dc](https://github.com/googleapis/python-dialogflow-cx/commit/61fd2dc3512e4bcfdfc51cf085cc12cbbf1e043b)) + +## [1.9.1](https://github.com/googleapis/python-dialogflow-cx/compare/v1.9.0...v1.9.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#263](https://github.com/googleapis/python-dialogflow-cx/issues/263)) ([6235775](https://github.com/googleapis/python-dialogflow-cx/commit/623577550fc96786c2142d971dea4c31c116181a)) +* **deps:** require proto-plus>=1.15.0 ([6235775](https://github.com/googleapis/python-dialogflow-cx/commit/623577550fc96786c2142d971dea4c31c116181a)) + +## [1.9.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.8.0...v1.9.0) (2022-02-26) + + +### Features + +* add api_key support ([#240](https://github.com/googleapis/python-dialogflow-cx/issues/240)) ([cf95791](https://github.com/googleapis/python-dialogflow-cx/commit/cf9579171290ecf5afeeb6a38a3504857808a4ef)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([f3bf440](https://github.com/googleapis/python-dialogflow-cx/commit/f3bf440a081622dfc6d8d8e385f2edb5c3e11202)) + + +### Documentation + +* add autogenerated code snippets ([2292ff5](https://github.com/googleapis/python-dialogflow-cx/commit/2292ff540aea24c3c831a5ffe1604c2c022ccb82)) + +## [1.8.0](https://github.com/googleapis/python-dialogflow-cx/compare/v1.7.0...v1.8.0) (2022-01-14) + + +### Features + +* **v3:** added `TelephonyTransferCall` in response message ([#216](https://github.com/googleapis/python-dialogflow-cx/issues/216)) ([76dae8b](https://github.com/googleapis/python-dialogflow-cx/commit/76dae8b03c0e3bf33123b8001e3f8d40701b5c19)) +* **v3:** added the display name of the 
current page in webhook requests ([#221](https://github.com/googleapis/python-dialogflow-cx/issues/221)) ([aa91b72](https://github.com/googleapis/python-dialogflow-cx/commit/aa91b729ffa07230b011a61d3eb1521f59345fc1)) +* **v3:** allow setting custom CA for generic webhooks ([#214](https://github.com/googleapis/python-dialogflow-cx/issues/214)) ([8f3dc03](https://github.com/googleapis/python-dialogflow-cx/commit/8f3dc03835b5bb5baa36224a274f24dda7aa3709)) +* **v3beta1:** added `TelephonyTransferCall` in response message ([#217](https://github.com/googleapis/python-dialogflow-cx/issues/217)) ([e24bdfd](https://github.com/googleapis/python-dialogflow-cx/commit/e24bdfd499952199dfbdaa5634061653da8ae1db)) +* **v3beta1:** added the display name of the current page in webhook requests ([#222](https://github.com/googleapis/python-dialogflow-cx/issues/222)) ([5956179](https://github.com/googleapis/python-dialogflow-cx/commit/595617990ccdc0575f97ae547984e89f638cb664)) +* **v3:** release CompareVersions API ([8f3dc03](https://github.com/googleapis/python-dialogflow-cx/commit/8f3dc03835b5bb5baa36224a274f24dda7aa3709)) + +## [1.7.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.6.0...v1.7.0) (2021-11-12) + + +### Features + +* allow setting custom CA for generic webhooks ([#207](https://www.github.com/googleapis/python-dialogflow-cx/issues/207)) ([441d66b](https://www.github.com/googleapis/python-dialogflow-cx/commit/441d66b3864c34cf37570b6c58ccf097eb20e919)) + + +### Documentation + +* **samples:** added comment to webhook sample ([#211](https://www.github.com/googleapis/python-dialogflow-cx/issues/211)) ([4d36e31](https://www.github.com/googleapis/python-dialogflow-cx/commit/4d36e31f20e38755dd5d4a7a92bb8c48722cb11e)) + +## [1.6.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.5.0...v1.6.0) (2021-11-05) + + +### Features + +* add support for python 3.10 ([#192](https://www.github.com/googleapis/python-dialogflow-cx/issues/192)) 
([527b679](https://www.github.com/googleapis/python-dialogflow-cx/commit/527b679286ce7ed6481bf5c9258858473ca1f601)) +* **v3:** added API for changelogs ([#197](https://www.github.com/googleapis/python-dialogflow-cx/issues/197)) ([4f41653](https://www.github.com/googleapis/python-dialogflow-cx/commit/4f41653b08a5be4aa6c871f285d941a2f43740a9)) +* **v3beta1:** added API for changelogs ([#198](https://www.github.com/googleapis/python-dialogflow-cx/issues/198)) ([484e13a](https://www.github.com/googleapis/python-dialogflow-cx/commit/484e13a78830a3d0ce8b1745fdf2dfce0f88a21e)) +* **v3beta1:** added support for comparing between versions ([#202](https://www.github.com/googleapis/python-dialogflow-cx/issues/202)) ([b8e16f8](https://www.github.com/googleapis/python-dialogflow-cx/commit/b8e16f8a17a49c1fc65f7f4392e33fb273bcd8ca)) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([c44582e](https://www.github.com/googleapis/python-dialogflow-cx/commit/c44582e2fe3d49327bb0fc5ba05d2959b6965b7b)) +* **deps:** require google-api-core >= 1.28.0 ([c44582e](https://www.github.com/googleapis/python-dialogflow-cx/commit/c44582e2fe3d49327bb0fc5ba05d2959b6965b7b)) + + +### Documentation + +* list oneofs in docstring ([c44582e](https://www.github.com/googleapis/python-dialogflow-cx/commit/c44582e2fe3d49327bb0fc5ba05d2959b6965b7b)) +* **samples:** add voice selection ([#184](https://www.github.com/googleapis/python-dialogflow-cx/issues/184)) ([778b86f](https://www.github.com/googleapis/python-dialogflow-cx/commit/778b86f69e88ceb884bb9933128d2b14c6689174)) +* **samples:** added webhook sample ([#169](https://www.github.com/googleapis/python-dialogflow-cx/issues/169)) ([74cfc9a](https://www.github.com/googleapis/python-dialogflow-cx/commit/74cfc9a7ab9b85fa05565c406534a3cd3391ab4f)) +* **samples:** adds list training phrases sample ([#196](https://www.github.com/googleapis/python-dialogflow-cx/issues/196)) 
([dfefa4e](https://www.github.com/googleapis/python-dialogflow-cx/commit/dfefa4e21bcd6bf37d341911187fab283152f514)) +* **v3beta1:** clarified security settings API reference ([b8e16f8](https://www.github.com/googleapis/python-dialogflow-cx/commit/b8e16f8a17a49c1fc65f7f4392e33fb273bcd8ca)) +* **v3:** clarified semantic of the streaming APIs ([4f41653](https://www.github.com/googleapis/python-dialogflow-cx/commit/4f41653b08a5be4aa6c871f285d941a2f43740a9)) + +## [1.5.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.4.0...v1.5.0) (2021-10-11) + + +### Features + +* add context manager support in client ([#187](https://www.github.com/googleapis/python-dialogflow-cx/issues/187)) ([70d4776](https://www.github.com/googleapis/python-dialogflow-cx/commit/70d4776d80f1f60b3f45e08e8dc64a47e2fae3e6)) + +## [1.4.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.3.2...v1.4.0) (2021-10-05) + + +### Features + +* expose dtmf input info in the query result ([#181](https://www.github.com/googleapis/python-dialogflow-cx/issues/181)) ([e44d0ce](https://www.github.com/googleapis/python-dialogflow-cx/commit/e44d0cee6b51842e45255773c33ae0bbfc672f30)) +* exposed DTMF input info in the query result ([1383bf8](https://www.github.com/googleapis/python-dialogflow-cx/commit/1383bf82b8b2ae0cd92d81768a693a2732a53a46)) +* **v3:** added deployment API ([#182](https://www.github.com/googleapis/python-dialogflow-cx/issues/182)) ([1383bf8](https://www.github.com/googleapis/python-dialogflow-cx/commit/1383bf82b8b2ae0cd92d81768a693a2732a53a46)) +* **v3beta1:** added support for DeployFlow api under Environments ([977e2aa](https://www.github.com/googleapis/python-dialogflow-cx/commit/977e2aa9c17c235d125c633a6f4ba9f5a1dade7b)) +* **v3beta1:** added support for Deployments with ListDeployments and GetDeployment apis ([#177](https://www.github.com/googleapis/python-dialogflow-cx/issues/177)) 
([977e2aa](https://www.github.com/googleapis/python-dialogflow-cx/commit/977e2aa9c17c235d125c633a6f4ba9f5a1dade7b)) +* **v3beta1:** added support for TestCasesConfig under Environment ([977e2aa](https://www.github.com/googleapis/python-dialogflow-cx/commit/977e2aa9c17c235d125c633a6f4ba9f5a1dade7b)) + + +### Bug Fixes + +* improper types in pagers generation ([615718b](https://www.github.com/googleapis/python-dialogflow-cx/commit/615718bf80e5c2f3060ff38675a7f4fe8deb59b1)) +* **v3beta1:** marked resource name of security setting as not-required ([977e2aa](https://www.github.com/googleapis/python-dialogflow-cx/commit/977e2aa9c17c235d125c633a6f4ba9f5a1dade7b)) + + +### Documentation + +* added notes on long running operation ([1383bf8](https://www.github.com/googleapis/python-dialogflow-cx/commit/1383bf82b8b2ae0cd92d81768a693a2732a53a46)) +* **v3beta1:** added long running operation explanation for several apis ([977e2aa](https://www.github.com/googleapis/python-dialogflow-cx/commit/977e2aa9c17c235d125c633a6f4ba9f5a1dade7b)) + +## [1.3.2](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.3.1...v1.3.2) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([96021b9](https://www.github.com/googleapis/python-dialogflow-cx/commit/96021b9a78a40f3f8f9898d53493912a1621da89)) + + +### Documentation + +* **samples:** added filter sample ([#171](https://www.github.com/googleapis/python-dialogflow-cx/issues/171)) ([5b7e6b9](https://www.github.com/googleapis/python-dialogflow-cx/commit/5b7e6b9fcb066e823d58c5ce3af0f32a54d896bc)) +* **samples:** added page management samples ([#152](https://www.github.com/googleapis/python-dialogflow-cx/issues/152)) ([41d15f8](https://www.github.com/googleapis/python-dialogflow-cx/commit/41d15f8f212baab09222c6a350efc3376a3bf9ea)) + +## [1.3.1](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.3.0...v1.3.1) (2021-08-31) + + +### Documentation + +* **v3beta1:** clarified LRO types 
([#161](https://www.github.com/googleapis/python-dialogflow-cx/issues/161)) ([dd2b2fd](https://www.github.com/googleapis/python-dialogflow-cx/commit/dd2b2fd345e91fa3da1290f6896ac6b521c2eb7b)) +* **v3beta1:** fixed incorrect update mask descriptions ([dd2b2fd](https://www.github.com/googleapis/python-dialogflow-cx/commit/dd2b2fd345e91fa3da1290f6896ac6b521c2eb7b)) + +## [1.3.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.2.0...v1.3.0) (2021-08-27) + + +### Features + +* **v3:** added support for DLP templates ([#144](https://www.github.com/googleapis/python-dialogflow-cx/issues/144)) ([c74e3ac](https://www.github.com/googleapis/python-dialogflow-cx/commit/c74e3acb609363cdf941586e98ed2c8c5804980b)) +* **v3beta1:** added support for DLP templates ([#143](https://www.github.com/googleapis/python-dialogflow-cx/issues/143)) ([7efb89c](https://www.github.com/googleapis/python-dialogflow-cx/commit/7efb89cc6311e1df9d03740ba6d078af3f79559e)) +* **v3beta1:** expose `Locations` service to get/list available locations of Dialogflow products ([7efb89c](https://www.github.com/googleapis/python-dialogflow-cx/commit/7efb89cc6311e1df9d03740ba6d078af3f79559e)) +* **v3:** expose `Locations` service to get/list available locations of Dialogflow products ([c74e3ac](https://www.github.com/googleapis/python-dialogflow-cx/commit/c74e3acb609363cdf941586e98ed2c8c5804980b)) + + +### Documentation + +* clarified LRO types ([#156](https://www.github.com/googleapis/python-dialogflow-cx/issues/156)) ([a50e8dc](https://www.github.com/googleapis/python-dialogflow-cx/commit/a50e8dca8952cea19be2587ee68c600a41a92eeb)) +* fixed incorrect update mask descriptions ([a50e8dc](https://www.github.com/googleapis/python-dialogflow-cx/commit/a50e8dca8952cea19be2587ee68c600a41a92eeb)) +* **samples:** add agent creation code snippet ([#146](https://www.github.com/googleapis/python-dialogflow-cx/issues/146)) 
([272fc98](https://www.github.com/googleapis/python-dialogflow-cx/commit/272fc9879e536f7a9ea31d8c10169cf644170769)) +* **samples:** add region tags ([#150](https://www.github.com/googleapis/python-dialogflow-cx/issues/150)) ([54ea84d](https://www.github.com/googleapis/python-dialogflow-cx/commit/54ea84d58064a7c8d0a71f020a0c8cf36ac157bd)) +* **samples:** add region tags ([#151](https://www.github.com/googleapis/python-dialogflow-cx/issues/151)) ([788b67a](https://www.github.com/googleapis/python-dialogflow-cx/commit/788b67a8c966788a70a5d683a5e79e3a289c7ba3)) +* **samples:** add update intent sample ([#142](https://www.github.com/googleapis/python-dialogflow-cx/issues/142)) ([3e80235](https://www.github.com/googleapis/python-dialogflow-cx/commit/3e80235277a0df12644d743f6853ba45263a1239)) +* **v3beta1:** reorder some fields ([7efb89c](https://www.github.com/googleapis/python-dialogflow-cx/commit/7efb89cc6311e1df9d03740ba6d078af3f79559e)) + +## [1.2.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.1.1...v1.2.0) (2021-08-02) + + +### Features + +* **v3:** add advanced settings for agent level ([#137](https://www.github.com/googleapis/python-dialogflow-cx/issues/137)) ([24ef477](https://www.github.com/googleapis/python-dialogflow-cx/commit/24ef4773d4f6392ff39e7cdf70ff67b64ee50449)) +* **v3:** add insights export settings for security setting ([24ef477](https://www.github.com/googleapis/python-dialogflow-cx/commit/24ef4773d4f6392ff39e7cdf70ff67b64ee50449)) +* **v3:** add language code for streaming recognition result and flow versions for query parameters ([24ef477](https://www.github.com/googleapis/python-dialogflow-cx/commit/24ef4773d4f6392ff39e7cdf70ff67b64ee50449)) +* **v3:** add rollout config, state and failure reason for experiment ([24ef477](https://www.github.com/googleapis/python-dialogflow-cx/commit/24ef4773d4f6392ff39e7cdf70ff67b64ee50449)) +* **v3beta1:** add advanced settings for agent level 
([#138](https://www.github.com/googleapis/python-dialogflow-cx/issues/138)) ([96141a1](https://www.github.com/googleapis/python-dialogflow-cx/commit/96141a11fdba3dcb2a77a261505583bba75fcc77)) +* **v3beta1:** add insights export settings for security setting ([96141a1](https://www.github.com/googleapis/python-dialogflow-cx/commit/96141a11fdba3dcb2a77a261505583bba75fcc77)) +* **v3beta1:** add language code for streaming recognition result and flow versions for query parameters ([96141a1](https://www.github.com/googleapis/python-dialogflow-cx/commit/96141a11fdba3dcb2a77a261505583bba75fcc77)) +* **v3beta1:** add rollout config, state and failure reason for experiment ([96141a1](https://www.github.com/googleapis/python-dialogflow-cx/commit/96141a11fdba3dcb2a77a261505583bba75fcc77)) + + +### Documentation + +* **v3beta1:** deprecate legacy logging settings ([96141a1](https://www.github.com/googleapis/python-dialogflow-cx/commit/96141a11fdba3dcb2a77a261505583bba75fcc77)) +* **v3:** deprecate legacy logging settings ([24ef477](https://www.github.com/googleapis/python-dialogflow-cx/commit/24ef4773d4f6392ff39e7cdf70ff67b64ee50449)) + +## [1.1.1](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.1.0...v1.1.1) (2021-07-24) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#134](https://www.github.com/googleapis/python-dialogflow-cx/issues/134)) ([5e42bd0](https://www.github.com/googleapis/python-dialogflow-cx/commit/5e42bd0786607ed0636e80be073007224bcb520e)) + +## [1.1.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v1.0.0...v1.1.0) (2021-07-22) + + +### Features + +* add Samples section to CONTRIBUTING.rst ([#129](https://www.github.com/googleapis/python-dialogflow-cx/issues/129)) ([19238ad](https://www.github.com/googleapis/python-dialogflow-cx/commit/19238ad534b7528e3481e50a44663b541da197ef)) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions 
([#128](https://www.github.com/googleapis/python-dialogflow-cx/issues/128)) ([fbe63e8](https://www.github.com/googleapis/python-dialogflow-cx/commit/fbe63e86df05150e009e8e7b7e103735b382556e)) + +## [1.0.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.8.0...v1.0.0) (2021-06-30) + + +### Features + +* bump release level to production/stable ([#88](https://www.github.com/googleapis/python-dialogflow-cx/issues/88)) ([47739f9](https://www.github.com/googleapis/python-dialogflow-cx/commit/47739f927032ca8701297260f0374f11b2c756b9)) + +## [0.8.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.7.1...v0.8.0) (2021-06-30) + + +### Features + +* add always_use_jwt_access ([3550fa7](https://www.github.com/googleapis/python-dialogflow-cx/commit/3550fa7fa5863a313bd9288a1f515557f8f9fdea)) +* add return_partial response to Fulfillment ([3550fa7](https://www.github.com/googleapis/python-dialogflow-cx/commit/3550fa7fa5863a313bd9288a1f515557f8f9fdea)) +* mark agent.default_language_code as required ([3550fa7](https://www.github.com/googleapis/python-dialogflow-cx/commit/3550fa7fa5863a313bd9288a1f515557f8f9fdea)) + + +### Bug Fixes + +* disable always_use_jwt_access ([#119](https://www.github.com/googleapis/python-dialogflow-cx/issues/119)) ([3550fa7](https://www.github.com/googleapis/python-dialogflow-cx/commit/3550fa7fa5863a313bd9288a1f515557f8f9fdea)) + + +### Documentation + +* add notes to train agent before sending queries ([3550fa7](https://www.github.com/googleapis/python-dialogflow-cx/commit/3550fa7fa5863a313bd9288a1f515557f8f9fdea)) +* added notes to train agent before sending queries ([#111](https://www.github.com/googleapis/python-dialogflow-cx/issues/111)) ([8a53800](https://www.github.com/googleapis/python-dialogflow-cx/commit/8a5380055dfa7d58f83be5ce0da310318b54fc51)) +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-dialogflow-cx/issues/1127)) 
([#110](https://www.github.com/googleapis/python-dialogflow-cx/issues/110)) ([40974f5](https://www.github.com/googleapis/python-dialogflow-cx/commit/40974f56c27833ab23575426f75d0868e1a10d94)), closes [#1126](https://www.github.com/googleapis/python-dialogflow-cx/issues/1126) + +## [0.7.1](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.7.0...v0.7.1) (2021-06-16) + + +### Bug Fixes + +* **deps:** add packaging requirement ([#105](https://www.github.com/googleapis/python-dialogflow-cx/issues/105)) ([5d47692](https://www.github.com/googleapis/python-dialogflow-cx/commit/5d476920c8c6825050a828896231542f6bfde2e2)) + +## [0.7.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.6.0...v0.7.0) (2021-06-16) + + +### Features + +* **v3beta1:** Support partial response feature ([5e21ed4](https://www.github.com/googleapis/python-dialogflow-cx/commit/5e21ed454ad6a5d687e634ad4e697f1921104c47)) +* **v3beta1:** support sentiment analysis in bot testing ([#100](https://www.github.com/googleapis/python-dialogflow-cx/issues/100)) ([f24f302](https://www.github.com/googleapis/python-dialogflow-cx/commit/f24f3028fa83da14614d1340e5bce7719be287b6)) + + +### Bug Fixes + +* exclude docs and tests from package ([#104](https://www.github.com/googleapis/python-dialogflow-cx/issues/104)) ([2ddb70b](https://www.github.com/googleapis/python-dialogflow-cx/commit/2ddb70b5825ad0d59165d7dfbfe36677d586cccf)) +* **v3beta1:** Set agent default language code as required ([#103](https://www.github.com/googleapis/python-dialogflow-cx/issues/103)) ([5e21ed4](https://www.github.com/googleapis/python-dialogflow-cx/commit/5e21ed454ad6a5d687e634ad4e697f1921104c47)) + + +### Documentation + +* **v3beta1:** Update docs of Agents, Fulfillments, SecuritySettings and Sessions ([5e21ed4](https://www.github.com/googleapis/python-dialogflow-cx/commit/5e21ed454ad6a5d687e634ad4e697f1921104c47)) + +## [0.6.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.5.0...v0.6.0) 
(2021-06-07) + + +### Features + +* support sentiment analysis in bot testing ([#98](https://www.github.com/googleapis/python-dialogflow-cx/issues/98)) ([db258bc](https://www.github.com/googleapis/python-dialogflow-cx/commit/db258bcc9971542e347b50f396bd51ec88520fde)) + +## [0.5.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.4.1...v0.5.0) (2021-05-28) + + +### Features + +* add export / import flow API ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* add support for service directory webhooks ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* added API for continuous test ([#91](https://www.github.com/googleapis/python-dialogflow-cx/issues/91)) ([81d4f53](https://www.github.com/googleapis/python-dialogflow-cx/commit/81d4f53cd4a4080b21221126dacaf2e13ca2efcf)) +* added API for running continuous test ([#94](https://www.github.com/googleapis/python-dialogflow-cx/issues/94)) ([cc30fa3](https://www.github.com/googleapis/python-dialogflow-cx/commit/cc30fa3e767bac2f33637ce1c29766ff41e9225b)) +* added fallback option when restoring an agent ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* Expose supported languages of the agent; ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* include original user query in WebhookRequest; add GetTestCaseResult API. 
([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* support self-signed JWT flow for service accounts ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* require google-api-core>=1.22.2 ([3440f18](https://www.github.com/googleapis/python-dialogflow-cx/commit/3440f186cd879fd4ddc9b3442bf857a4f286698e)) +* **v3:** BREAKING rename `UserInput.input_` to `UserInput.input` ([#58](https://www.github.com/googleapis/python-dialogflow-cx/issues/58)) ([3440f18](https://www.github.com/googleapis/python-dialogflow-cx/commit/3440f186cd879fd4ddc9b3442bf857a4f286698e)) + + +### Documentation + +* clarified documentation for security settings docs: clarified documentation for session parameters ([#89](https://www.github.com/googleapis/python-dialogflow-cx/issues/89)) ([750a055](https://www.github.com/googleapis/python-dialogflow-cx/commit/750a055b688ebeda8e8882cdb02bdc87524a69a5)) +* clarified documentation for security settings docs: clarified documentation for session parameters ([#90](https://www.github.com/googleapis/python-dialogflow-cx/issues/90)) ([c1c0fb9](https://www.github.com/googleapis/python-dialogflow-cx/commit/c1c0fb9eb9e62dc794aef1bac357bb5c20e322dc)) +* clarified experiment length ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* clarify resource format for session response. ([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) +* Update docs on Pages, Session, Version, etc. 
([20df7c3](https://www.github.com/googleapis/python-dialogflow-cx/commit/20df7c3bfabef5da23970512a3f925f4dfe7d2f9)) + +## [0.4.1](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.4.0...v0.4.1) (2021-03-07) + + +### Documentation + +* fix readme ([#52](https://www.github.com/googleapis/python-dialogflow-cx/issues/52)) ([8728ad4](https://www.github.com/googleapis/python-dialogflow-cx/commit/8728ad4018bf9c976cdc469af3d8a7ec89c04671)) + +## [0.4.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.3.0...v0.4.0) (2021-03-05) + + +### Features + +* add from_service_account_info factory ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) +* Add new Experiment service ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) +* added support for test cases and agent validation ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) +* allow to disable webhook invocation per request ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) +* supports SentimentAnalysisResult in webhook request ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) + + +### Documentation + +* test cases doc update ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) +* update languages link ([d9bd192](https://www.github.com/googleapis/python-dialogflow-cx/commit/d9bd192a87bc8a4462da3bdbda362b359d86dd65)) + +## [0.3.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.2.0...v0.3.0) (2021-01-29) + + +### Features + +* add experiments API ([#36](https://www.github.com/googleapis/python-dialogflow-cx/issues/36)) 
([5381512](https://www.github.com/googleapis/python-dialogflow-cx/commit/5381512872ca2492ddabcbdd7ccde5f054aed011)) +* allowed custom to specify webhook headers through query parameters ([#32](https://www.github.com/googleapis/python-dialogflow-cx/issues/32)) ([09919b0](https://www.github.com/googleapis/python-dialogflow-cx/commit/09919b0e45517cedcbb1d8b5b931c7317be549b2)) +* allowed custom to specify webhook headers through query parameters ([#32](https://www.github.com/googleapis/python-dialogflow-cx/issues/32)) ([09919b0](https://www.github.com/googleapis/python-dialogflow-cx/commit/09919b0e45517cedcbb1d8b5b931c7317be549b2)) + + +### Bug Fixes + +* remove gRPC send/recv limit; add enums to `types/__init__.py` ([09919b0](https://www.github.com/googleapis/python-dialogflow-cx/commit/09919b0e45517cedcbb1d8b5b931c7317be549b2)) + +## [0.2.0](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.1.1...v0.2.0) (2020-12-07) + + +### Features + +* add v3 ([#21](https://www.github.com/googleapis/python-dialogflow-cx/issues/21)) ([97c7fb5](https://www.github.com/googleapis/python-dialogflow-cx/commit/97c7fb53e5f6af7d8b0fea3043c60da9ee1f549b)) + +## [0.1.1](https://www.github.com/googleapis/python-dialogflow-cx/compare/v0.1.0...v0.1.1) (2020-11-17) + + +### Bug Fixes + +* corrects the repo/homepage link ([#15](https://www.github.com/googleapis/python-dialogflow-cx/issues/15)) ([c26852d](https://www.github.com/googleapis/python-dialogflow-cx/commit/c26852d8a3738eb4d67222c555d0197a854e68a9)) + + +### Documentation + +* **samples:** add initial sample codes ([#13](https://www.github.com/googleapis/python-dialogflow-cx/issues/13)) ([b590308](https://www.github.com/googleapis/python-dialogflow-cx/commit/b590308b79a230561aed776f55260a73668c8efc)), closes [#12](https://www.github.com/googleapis/python-dialogflow-cx/issues/12) + +## 0.1.0 (2020-08-24) + + +### Features + +* generate v3beta1 
([0c6e3a9](https://www.github.com/googleapis/python-dialogflow-cx/commit/0c6e3a9ff1a38f6d6c5f8c2983cbfa1f7ff7536d)) diff --git a/packages/google-cloud-dialogflow-cx/CODE_OF_CONDUCT.md b/packages/google-cloud-dialogflow-cx/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. 
They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst b/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst new file mode 100644 index 000000000000..edca94cc39b8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+ +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. 
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system- -- -k + + + .. note:: + + System tests are only configured to run under Python. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). 
+ +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-dialogflow-cx + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-dialogflow-cx/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. 
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-dialogflow-cx/LICENSE b/packages/google-cloud-dialogflow-cx/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-dialogflow-cx/MANIFEST.in b/packages/google-cloud-dialogflow-cx/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-dialogflow-cx/README.rst b/packages/google-cloud-dialogflow-cx/README.rst new file mode 100644 index 000000000000..4076aad4cf5b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/README.rst @@ -0,0 +1,108 @@ +Python Client for Dialogflow CX +=============================== + +|stable| |pypi| |versions| + +`Dialogflow CX`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dialogflow-cx.svg + :target: https://pypi.org/project/google-cloud-dialogflow-cx/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dialogflow-cx.svg + :target: https://pypi.org/project/google-cloud-dialogflow-cx/ +.. _Dialogflow CX: https://cloud.google.com/dialogflow/cx/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dialogflow-cx/latest +.. _Product Documentation: https://cloud.google.com/dialogflow/cx/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Dialogflow CX.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Dialogflow CX.: https://cloud.google.com/dialogflow/cx/docs +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-dialogflow-cx + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-dialogflow-cx + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Dialogflow CX + to see other available methods on the client. 
+- Read the `Dialogflow CX Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Dialogflow CX Product documentation: https://cloud.google.com/dialogflow/cx/docs +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-dialogflow-cx/SECURITY.md b/packages/google-cloud-dialogflow-cx/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/packages/google-cloud-dialogflow-cx/dialogflow-cx-v3-py.tar.gz b/packages/google-cloud-dialogflow-cx/dialogflow-cx-v3-py.tar.gz new file mode 100644 index 000000000000..843316ab347b Binary files /dev/null and b/packages/google-cloud-dialogflow-cx/dialogflow-cx-v3-py.tar.gz differ diff --git a/packages/google-cloud-dialogflow-cx/dialogflow-cx-v3beta1-py.tar.gz b/packages/google-cloud-dialogflow-cx/dialogflow-cx-v3beta1-py.tar.gz new file mode 100644 index 000000000000..1086ecd4fd03 Binary files /dev/null and b/packages/google-cloud-dialogflow-cx/dialogflow-cx-v3beta1-py.tar.gz differ diff --git a/packages/google-cloud-dialogflow-cx/docs/CHANGELOG.md b/packages/google-cloud-dialogflow-cx/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-dialogflow-cx/docs/README.rst b/packages/google-cloud-dialogflow-cx/docs/README.rst new file mode 120000 index 
000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-dialogflow-cx/docs/_static/custom.css b/packages/google-cloud-dialogflow-cx/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-dialogflow-cx/docs/_templates/layout.html b/packages/google-cloud-dialogflow-cx/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-dialogflow-cx/docs/conf.py b/packages/google-cloud-dialogflow-cx/docs/conf.py new file mode 100644 index 000000000000..af586c43f007 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-dialogflow-cx documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-dialogflow-cx" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-dialogflow-cx", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dialogflow-cx-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dialogflow-cx.tex", + "google-cloud-dialogflow-cx Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dialogflow-cx", + "google-cloud-dialogflow-cx Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dialogflow-cx", + "google-cloud-dialogflow-cx Documentation", + author, + "google-cloud-dialogflow-cx", + "google-cloud-dialogflow-cx Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/agents.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/agents.rst new file mode 100644 index 000000000000..4deb173009c6 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/agents.rst @@ -0,0 +1,10 @@ +Agents +------------------------ + +.. 
automodule:: google.cloud.dialogflowcx_v3.services.agents + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.agents.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/changelogs.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/changelogs.rst new file mode 100644 index 000000000000..1f064c53c78a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/changelogs.rst @@ -0,0 +1,10 @@ +Changelogs +---------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.changelogs + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.changelogs.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/deployments.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/deployments.rst new file mode 100644 index 000000000000..1f065b30ddd8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/deployments.rst @@ -0,0 +1,10 @@ +Deployments +----------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.deployments + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.deployments.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/entity_types.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/entity_types.rst new file mode 100644 index 000000000000..08e4b6e82e86 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/entity_types.rst @@ -0,0 +1,10 @@ +EntityTypes +----------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.entity_types + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3.services.entity_types.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/environments.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/environments.rst new file mode 100644 index 000000000000..246ee15adbd9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/environments.rst @@ -0,0 +1,10 @@ +Environments +------------------------------ + +.. automodule:: google.cloud.dialogflowcx_v3.services.environments + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.environments.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/experiments.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/experiments.rst new file mode 100644 index 000000000000..5cba0e2a0d7f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/experiments.rst @@ -0,0 +1,10 @@ +Experiments +----------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.experiments + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.experiments.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/flows.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/flows.rst new file mode 100644 index 000000000000..77f6398e448b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/flows.rst @@ -0,0 +1,10 @@ +Flows +----------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.flows + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3.services.flows.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/intents.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/intents.rst new file mode 100644 index 000000000000..8b9e6dda6ed1 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/intents.rst @@ -0,0 +1,10 @@ +Intents +------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.intents + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.intents.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/pages.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/pages.rst new file mode 100644 index 000000000000..0bbfbced9639 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/pages.rst @@ -0,0 +1,10 @@ +Pages +----------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.pages + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.pages.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/security_settings_service.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/security_settings_service.rst new file mode 100644 index 000000000000..4b4fa0077545 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/security_settings_service.rst @@ -0,0 +1,10 @@ +SecuritySettingsService +----------------------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.security_settings_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3.services.security_settings_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/services.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/services.rst new file mode 100644 index 000000000000..6a19e426707e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/services.rst @@ -0,0 +1,21 @@ +Services for Google Cloud Dialogflowcx v3 API +============================================= +.. toctree:: + :maxdepth: 2 + + agents + changelogs + deployments + entity_types + environments + experiments + flows + intents + pages + security_settings_service + session_entity_types + sessions + test_cases + transition_route_groups + versions + webhooks diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/session_entity_types.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/session_entity_types.rst new file mode 100644 index 000000000000..46ff0f6e49b6 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/session_entity_types.rst @@ -0,0 +1,10 @@ +SessionEntityTypes +------------------------------------ + +.. automodule:: google.cloud.dialogflowcx_v3.services.session_entity_types + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.session_entity_types.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/sessions.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/sessions.rst new file mode 100644 index 000000000000..494bceb1375b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/sessions.rst @@ -0,0 +1,6 @@ +Sessions +-------------------------- + +.. 
automodule:: google.cloud.dialogflowcx_v3.services.sessions + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/test_cases.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/test_cases.rst new file mode 100644 index 000000000000..3cad4dd45ddc --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/test_cases.rst @@ -0,0 +1,10 @@ +TestCases +--------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.test_cases + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.test_cases.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/transition_route_groups.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/transition_route_groups.rst new file mode 100644 index 000000000000..d5549da965f7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/transition_route_groups.rst @@ -0,0 +1,10 @@ +TransitionRouteGroups +--------------------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.transition_route_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.transition_route_groups.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/types.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/types.rst new file mode 100644 index 000000000000..eee8146a1d62 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dialogflowcx v3 API +========================================== + +.. 
automodule:: google.cloud.dialogflowcx_v3.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/versions.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/versions.rst new file mode 100644 index 000000000000..2d72f02de565 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/versions.rst @@ -0,0 +1,10 @@ +Versions +-------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.versions + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.versions.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/webhooks.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/webhooks.rst new file mode 100644 index 000000000000..2e2f9b8cfa7f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3/webhooks.rst @@ -0,0 +1,10 @@ +Webhooks +-------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3.services.webhooks + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3.services.webhooks.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/agents.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/agents.rst new file mode 100644 index 000000000000..511ac6f987eb --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/agents.rst @@ -0,0 +1,10 @@ +Agents +------------------------ + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.agents + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3beta1.services.agents.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/changelogs.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/changelogs.rst new file mode 100644 index 000000000000..9fc3d9e168e6 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/changelogs.rst @@ -0,0 +1,10 @@ +Changelogs +---------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.changelogs + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.changelogs.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/deployments.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/deployments.rst new file mode 100644 index 000000000000..53b2aeaf76b3 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/deployments.rst @@ -0,0 +1,10 @@ +Deployments +----------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.deployments + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.deployments.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/entity_types.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/entity_types.rst new file mode 100644 index 000000000000..3878a96e0a39 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/entity_types.rst @@ -0,0 +1,10 @@ +EntityTypes +----------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.entity_types + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3beta1.services.entity_types.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/environments.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/environments.rst new file mode 100644 index 000000000000..0e31d34dc88b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/environments.rst @@ -0,0 +1,10 @@ +Environments +------------------------------ + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.environments + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.environments.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/experiments.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/experiments.rst new file mode 100644 index 000000000000..5c185e4fc968 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/experiments.rst @@ -0,0 +1,10 @@ +Experiments +----------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.experiments + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.experiments.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/flows.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/flows.rst new file mode 100644 index 000000000000..0183ff3b927e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/flows.rst @@ -0,0 +1,10 @@ +Flows +----------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.flows + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3beta1.services.flows.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/intents.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/intents.rst new file mode 100644 index 000000000000..3bf9d37cf11d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/intents.rst @@ -0,0 +1,10 @@ +Intents +------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.intents + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.intents.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/pages.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/pages.rst new file mode 100644 index 000000000000..be3f1b162500 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/pages.rst @@ -0,0 +1,10 @@ +Pages +----------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.pages + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.pages.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/security_settings_service.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/security_settings_service.rst new file mode 100644 index 000000000000..0489568a5fff --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/security_settings_service.rst @@ -0,0 +1,10 @@ +SecuritySettingsService +----------------------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.security_settings_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dialogflowcx_v3beta1.services.security_settings_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/services.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/services.rst new file mode 100644 index 000000000000..2029fb08c947 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/services.rst @@ -0,0 +1,21 @@ +Services for Google Cloud Dialogflowcx v3beta1 API +================================================== +.. toctree:: + :maxdepth: 2 + + agents + changelogs + deployments + entity_types + environments + experiments + flows + intents + pages + security_settings_service + session_entity_types + sessions + test_cases + transition_route_groups + versions + webhooks diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/session_entity_types.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/session_entity_types.rst new file mode 100644 index 000000000000..a62488358819 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/session_entity_types.rst @@ -0,0 +1,10 @@ +SessionEntityTypes +------------------------------------ + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.session_entity_types + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.session_entity_types.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/sessions.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/sessions.rst new file mode 100644 index 000000000000..3f792bf8f72c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/sessions.rst @@ -0,0 +1,6 @@ +Sessions +-------------------------- + +.. 
automodule:: google.cloud.dialogflowcx_v3beta1.services.sessions + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/test_cases.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/test_cases.rst new file mode 100644 index 000000000000..a212f0b4abf5 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/test_cases.rst @@ -0,0 +1,10 @@ +TestCases +--------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.test_cases + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.test_cases.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/transition_route_groups.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/transition_route_groups.rst new file mode 100644 index 000000000000..9b29ddf9577a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/transition_route_groups.rst @@ -0,0 +1,10 @@ +TransitionRouteGroups +--------------------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.transition_route_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.transition_route_groups.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/types.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/types.rst new file mode 100644 index 000000000000..3c1160db7b56 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dialogflowcx v3beta1 API +=============================================== + +.. 
automodule:: google.cloud.dialogflowcx_v3beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/versions.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/versions.rst new file mode 100644 index 000000000000..6617fb667b8e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/versions.rst @@ -0,0 +1,10 @@ +Versions +-------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.versions + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.versions.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/webhooks.rst b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/webhooks.rst new file mode 100644 index 000000000000..4fa96e0f737d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/dialogflowcx_v3beta1/webhooks.rst @@ -0,0 +1,10 @@ +Webhooks +-------------------------- + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.webhooks + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflowcx_v3beta1.services.webhooks.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow-cx/docs/index.rst b/packages/google-cloud-dialogflow-cx/docs/index.rst new file mode 100644 index 000000000000..5fea94ffd3da --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Dialogflow CX. +By default, you will get version ``dialogflowcx_v3``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dialogflowcx_v3/services + dialogflowcx_v3/types + +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + dialogflowcx_v3beta1/services + dialogflowcx_v3beta1/types + + +Changelog +--------- + +For a list of all ``google-cloud-dialogflow-cx`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-dialogflow-cx/docs/multiprocessing.rst b/packages/google-cloud-dialogflow-cx/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py new file mode 100644 index 000000000000..5954cfbe8fe4 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py @@ -0,0 +1,589 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dialogflowcx import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dialogflowcx_v3.services.agents.async_client import AgentsAsyncClient +from google.cloud.dialogflowcx_v3.services.agents.client import AgentsClient +from google.cloud.dialogflowcx_v3.services.changelogs.async_client import ( + ChangelogsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.changelogs.client import ChangelogsClient +from google.cloud.dialogflowcx_v3.services.deployments.async_client import ( + DeploymentsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.deployments.client import DeploymentsClient +from google.cloud.dialogflowcx_v3.services.entity_types.async_client import ( + EntityTypesAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.entity_types.client import EntityTypesClient +from google.cloud.dialogflowcx_v3.services.environments.async_client import ( + EnvironmentsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.environments.client import EnvironmentsClient +from google.cloud.dialogflowcx_v3.services.experiments.async_client import ( + ExperimentsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.experiments.client import ExperimentsClient +from google.cloud.dialogflowcx_v3.services.flows.async_client import FlowsAsyncClient +from google.cloud.dialogflowcx_v3.services.flows.client import FlowsClient +from google.cloud.dialogflowcx_v3.services.intents.async_client import ( + IntentsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.intents.client import IntentsClient +from google.cloud.dialogflowcx_v3.services.pages.async_client import PagesAsyncClient +from google.cloud.dialogflowcx_v3.services.pages.client import PagesClient +from google.cloud.dialogflowcx_v3.services.security_settings_service.async_client import ( + SecuritySettingsServiceAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.security_settings_service.client import ( + 
SecuritySettingsServiceClient, +) +from google.cloud.dialogflowcx_v3.services.session_entity_types.async_client import ( + SessionEntityTypesAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.session_entity_types.client import ( + SessionEntityTypesClient, +) +from google.cloud.dialogflowcx_v3.services.sessions.async_client import ( + SessionsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.sessions.client import SessionsClient +from google.cloud.dialogflowcx_v3.services.test_cases.async_client import ( + TestCasesAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.test_cases.client import TestCasesClient +from google.cloud.dialogflowcx_v3.services.transition_route_groups.async_client import ( + TransitionRouteGroupsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.transition_route_groups.client import ( + TransitionRouteGroupsClient, +) +from google.cloud.dialogflowcx_v3.services.versions.async_client import ( + VersionsAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.versions.client import VersionsClient +from google.cloud.dialogflowcx_v3.services.webhooks.async_client import ( + WebhooksAsyncClient, +) +from google.cloud.dialogflowcx_v3.services.webhooks.client import WebhooksClient +from google.cloud.dialogflowcx_v3.types.advanced_settings import AdvancedSettings +from google.cloud.dialogflowcx_v3.types.agent import ( + Agent, + AgentValidationResult, + CreateAgentRequest, + DeleteAgentRequest, + ExportAgentRequest, + ExportAgentResponse, + GetAgentRequest, + GetAgentValidationResultRequest, + GetGenerativeSettingsRequest, + ListAgentsRequest, + ListAgentsResponse, + RestoreAgentRequest, + SpeechToTextSettings, + UpdateAgentRequest, + UpdateGenerativeSettingsRequest, + ValidateAgentRequest, +) +from google.cloud.dialogflowcx_v3.types.audio_config import ( + AudioEncoding, + InputAudioConfig, + OutputAudioConfig, + OutputAudioEncoding, + SpeechModelVariant, + SpeechWordInfo, + SsmlVoiceGender, + 
SynthesizeSpeechConfig, + TextToSpeechSettings, + VoiceSelectionParams, +) +from google.cloud.dialogflowcx_v3.types.changelog import ( + Changelog, + GetChangelogRequest, + ListChangelogsRequest, + ListChangelogsResponse, +) +from google.cloud.dialogflowcx_v3.types.data_store_connection import ( + DataStoreConnection, + DataStoreType, +) +from google.cloud.dialogflowcx_v3.types.deployment import ( + Deployment, + GetDeploymentRequest, + ListDeploymentsRequest, + ListDeploymentsResponse, +) +from google.cloud.dialogflowcx_v3.types.entity_type import ( + CreateEntityTypeRequest, + DeleteEntityTypeRequest, + EntityType, + GetEntityTypeRequest, + ListEntityTypesRequest, + ListEntityTypesResponse, + UpdateEntityTypeRequest, +) +from google.cloud.dialogflowcx_v3.types.environment import ( + ContinuousTestResult, + CreateEnvironmentRequest, + DeleteEnvironmentRequest, + DeployFlowMetadata, + DeployFlowRequest, + DeployFlowResponse, + Environment, + GetEnvironmentRequest, + ListContinuousTestResultsRequest, + ListContinuousTestResultsResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, + LookupEnvironmentHistoryRequest, + LookupEnvironmentHistoryResponse, + RunContinuousTestMetadata, + RunContinuousTestRequest, + RunContinuousTestResponse, + UpdateEnvironmentRequest, +) +from google.cloud.dialogflowcx_v3.types.experiment import ( + CreateExperimentRequest, + DeleteExperimentRequest, + Experiment, + GetExperimentRequest, + ListExperimentsRequest, + ListExperimentsResponse, + RolloutConfig, + RolloutState, + StartExperimentRequest, + StopExperimentRequest, + UpdateExperimentRequest, + VariantsHistory, + VersionVariants, +) +from google.cloud.dialogflowcx_v3.types.flow import ( + CreateFlowRequest, + DeleteFlowRequest, + ExportFlowRequest, + ExportFlowResponse, + Flow, + FlowImportStrategy, + FlowValidationResult, + GetFlowRequest, + GetFlowValidationResultRequest, + ImportFlowRequest, + ImportFlowResponse, + ListFlowsRequest, + ListFlowsResponse, + NluSettings, + 
TrainFlowRequest, + UpdateFlowRequest, + ValidateFlowRequest, +) +from google.cloud.dialogflowcx_v3.types.fulfillment import Fulfillment +from google.cloud.dialogflowcx_v3.types.gcs import GcsDestination +from google.cloud.dialogflowcx_v3.types.generative_settings import GenerativeSettings +from google.cloud.dialogflowcx_v3.types.import_strategy import ImportStrategy +from google.cloud.dialogflowcx_v3.types.intent import ( + CreateIntentRequest, + DeleteIntentRequest, + GetIntentRequest, + Intent, + IntentView, + ListIntentsRequest, + ListIntentsResponse, + UpdateIntentRequest, +) +from google.cloud.dialogflowcx_v3.types.page import ( + CreatePageRequest, + DeletePageRequest, + EventHandler, + Form, + GetPageRequest, + KnowledgeConnectorSettings, + ListPagesRequest, + ListPagesResponse, + Page, + TransitionRoute, + UpdatePageRequest, +) +from google.cloud.dialogflowcx_v3.types.response_message import ResponseMessage +from google.cloud.dialogflowcx_v3.types.safety_settings import SafetySettings +from google.cloud.dialogflowcx_v3.types.security_settings import ( + CreateSecuritySettingsRequest, + DeleteSecuritySettingsRequest, + GetSecuritySettingsRequest, + ListSecuritySettingsRequest, + ListSecuritySettingsResponse, + SecuritySettings, + UpdateSecuritySettingsRequest, +) +from google.cloud.dialogflowcx_v3.types.session import ( + AudioInput, + CloudConversationDebuggingInfo, + DetectIntentRequest, + DetectIntentResponse, + DtmfInput, + EventInput, + FulfillIntentRequest, + FulfillIntentResponse, + IntentInput, + Match, + MatchIntentRequest, + MatchIntentResponse, + QueryInput, + QueryParameters, + QueryResult, + SentimentAnalysisResult, + StreamingDetectIntentRequest, + StreamingDetectIntentResponse, + StreamingRecognitionResult, + TextInput, +) +from google.cloud.dialogflowcx_v3.types.session_entity_type import ( + CreateSessionEntityTypeRequest, + DeleteSessionEntityTypeRequest, + GetSessionEntityTypeRequest, + ListSessionEntityTypesRequest, + 
ListSessionEntityTypesResponse, + SessionEntityType, + UpdateSessionEntityTypeRequest, +) +from google.cloud.dialogflowcx_v3.types.test_case import ( + BatchDeleteTestCasesRequest, + BatchRunTestCasesMetadata, + BatchRunTestCasesRequest, + BatchRunTestCasesResponse, + CalculateCoverageRequest, + CalculateCoverageResponse, + ConversationTurn, + CreateTestCaseRequest, + ExportTestCasesMetadata, + ExportTestCasesRequest, + ExportTestCasesResponse, + GetTestCaseRequest, + GetTestCaseResultRequest, + ImportTestCasesMetadata, + ImportTestCasesRequest, + ImportTestCasesResponse, + IntentCoverage, + ListTestCaseResultsRequest, + ListTestCaseResultsResponse, + ListTestCasesRequest, + ListTestCasesResponse, + RunTestCaseMetadata, + RunTestCaseRequest, + RunTestCaseResponse, + TestCase, + TestCaseError, + TestCaseResult, + TestConfig, + TestError, + TestResult, + TestRunDifference, + TransitionCoverage, + TransitionRouteGroupCoverage, + UpdateTestCaseRequest, +) +from google.cloud.dialogflowcx_v3.types.transition_route_group import ( + CreateTransitionRouteGroupRequest, + DeleteTransitionRouteGroupRequest, + GetTransitionRouteGroupRequest, + ListTransitionRouteGroupsRequest, + ListTransitionRouteGroupsResponse, + TransitionRouteGroup, + UpdateTransitionRouteGroupRequest, +) +from google.cloud.dialogflowcx_v3.types.validation_message import ( + ResourceName, + ValidationMessage, +) +from google.cloud.dialogflowcx_v3.types.version import ( + CompareVersionsRequest, + CompareVersionsResponse, + CreateVersionOperationMetadata, + CreateVersionRequest, + DeleteVersionRequest, + GetVersionRequest, + ListVersionsRequest, + ListVersionsResponse, + LoadVersionRequest, + UpdateVersionRequest, + Version, +) +from google.cloud.dialogflowcx_v3.types.webhook import ( + CreateWebhookRequest, + DeleteWebhookRequest, + GetWebhookRequest, + ListWebhooksRequest, + ListWebhooksResponse, + PageInfo, + SessionInfo, + UpdateWebhookRequest, + Webhook, + WebhookRequest, + WebhookResponse, +) + 
+__all__ = ( + "AgentsClient", + "AgentsAsyncClient", + "ChangelogsClient", + "ChangelogsAsyncClient", + "DeploymentsClient", + "DeploymentsAsyncClient", + "EntityTypesClient", + "EntityTypesAsyncClient", + "EnvironmentsClient", + "EnvironmentsAsyncClient", + "ExperimentsClient", + "ExperimentsAsyncClient", + "FlowsClient", + "FlowsAsyncClient", + "IntentsClient", + "IntentsAsyncClient", + "PagesClient", + "PagesAsyncClient", + "SecuritySettingsServiceClient", + "SecuritySettingsServiceAsyncClient", + "SessionEntityTypesClient", + "SessionEntityTypesAsyncClient", + "SessionsClient", + "SessionsAsyncClient", + "TestCasesClient", + "TestCasesAsyncClient", + "TransitionRouteGroupsClient", + "TransitionRouteGroupsAsyncClient", + "VersionsClient", + "VersionsAsyncClient", + "WebhooksClient", + "WebhooksAsyncClient", + "AdvancedSettings", + "Agent", + "AgentValidationResult", + "CreateAgentRequest", + "DeleteAgentRequest", + "ExportAgentRequest", + "ExportAgentResponse", + "GetAgentRequest", + "GetAgentValidationResultRequest", + "GetGenerativeSettingsRequest", + "ListAgentsRequest", + "ListAgentsResponse", + "RestoreAgentRequest", + "SpeechToTextSettings", + "UpdateAgentRequest", + "UpdateGenerativeSettingsRequest", + "ValidateAgentRequest", + "InputAudioConfig", + "OutputAudioConfig", + "SpeechWordInfo", + "SynthesizeSpeechConfig", + "TextToSpeechSettings", + "VoiceSelectionParams", + "AudioEncoding", + "OutputAudioEncoding", + "SpeechModelVariant", + "SsmlVoiceGender", + "Changelog", + "GetChangelogRequest", + "ListChangelogsRequest", + "ListChangelogsResponse", + "DataStoreConnection", + "DataStoreType", + "Deployment", + "GetDeploymentRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "CreateEntityTypeRequest", + "DeleteEntityTypeRequest", + "EntityType", + "GetEntityTypeRequest", + "ListEntityTypesRequest", + "ListEntityTypesResponse", + "UpdateEntityTypeRequest", + "ContinuousTestResult", + "CreateEnvironmentRequest", + 
"DeleteEnvironmentRequest", + "DeployFlowMetadata", + "DeployFlowRequest", + "DeployFlowResponse", + "Environment", + "GetEnvironmentRequest", + "ListContinuousTestResultsRequest", + "ListContinuousTestResultsResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "LookupEnvironmentHistoryRequest", + "LookupEnvironmentHistoryResponse", + "RunContinuousTestMetadata", + "RunContinuousTestRequest", + "RunContinuousTestResponse", + "UpdateEnvironmentRequest", + "CreateExperimentRequest", + "DeleteExperimentRequest", + "Experiment", + "GetExperimentRequest", + "ListExperimentsRequest", + "ListExperimentsResponse", + "RolloutConfig", + "RolloutState", + "StartExperimentRequest", + "StopExperimentRequest", + "UpdateExperimentRequest", + "VariantsHistory", + "VersionVariants", + "CreateFlowRequest", + "DeleteFlowRequest", + "ExportFlowRequest", + "ExportFlowResponse", + "Flow", + "FlowImportStrategy", + "FlowValidationResult", + "GetFlowRequest", + "GetFlowValidationResultRequest", + "ImportFlowRequest", + "ImportFlowResponse", + "ListFlowsRequest", + "ListFlowsResponse", + "NluSettings", + "TrainFlowRequest", + "UpdateFlowRequest", + "ValidateFlowRequest", + "Fulfillment", + "GcsDestination", + "GenerativeSettings", + "ImportStrategy", + "CreateIntentRequest", + "DeleteIntentRequest", + "GetIntentRequest", + "Intent", + "ListIntentsRequest", + "ListIntentsResponse", + "UpdateIntentRequest", + "IntentView", + "CreatePageRequest", + "DeletePageRequest", + "EventHandler", + "Form", + "GetPageRequest", + "KnowledgeConnectorSettings", + "ListPagesRequest", + "ListPagesResponse", + "Page", + "TransitionRoute", + "UpdatePageRequest", + "ResponseMessage", + "SafetySettings", + "CreateSecuritySettingsRequest", + "DeleteSecuritySettingsRequest", + "GetSecuritySettingsRequest", + "ListSecuritySettingsRequest", + "ListSecuritySettingsResponse", + "SecuritySettings", + "UpdateSecuritySettingsRequest", + "AudioInput", + "CloudConversationDebuggingInfo", + 
"DetectIntentRequest", + "DetectIntentResponse", + "DtmfInput", + "EventInput", + "FulfillIntentRequest", + "FulfillIntentResponse", + "IntentInput", + "Match", + "MatchIntentRequest", + "MatchIntentResponse", + "QueryInput", + "QueryParameters", + "QueryResult", + "SentimentAnalysisResult", + "StreamingDetectIntentRequest", + "StreamingDetectIntentResponse", + "StreamingRecognitionResult", + "TextInput", + "CreateSessionEntityTypeRequest", + "DeleteSessionEntityTypeRequest", + "GetSessionEntityTypeRequest", + "ListSessionEntityTypesRequest", + "ListSessionEntityTypesResponse", + "SessionEntityType", + "UpdateSessionEntityTypeRequest", + "BatchDeleteTestCasesRequest", + "BatchRunTestCasesMetadata", + "BatchRunTestCasesRequest", + "BatchRunTestCasesResponse", + "CalculateCoverageRequest", + "CalculateCoverageResponse", + "ConversationTurn", + "CreateTestCaseRequest", + "ExportTestCasesMetadata", + "ExportTestCasesRequest", + "ExportTestCasesResponse", + "GetTestCaseRequest", + "GetTestCaseResultRequest", + "ImportTestCasesMetadata", + "ImportTestCasesRequest", + "ImportTestCasesResponse", + "IntentCoverage", + "ListTestCaseResultsRequest", + "ListTestCaseResultsResponse", + "ListTestCasesRequest", + "ListTestCasesResponse", + "RunTestCaseMetadata", + "RunTestCaseRequest", + "RunTestCaseResponse", + "TestCase", + "TestCaseError", + "TestCaseResult", + "TestConfig", + "TestError", + "TestRunDifference", + "TransitionCoverage", + "TransitionRouteGroupCoverage", + "UpdateTestCaseRequest", + "TestResult", + "CreateTransitionRouteGroupRequest", + "DeleteTransitionRouteGroupRequest", + "GetTransitionRouteGroupRequest", + "ListTransitionRouteGroupsRequest", + "ListTransitionRouteGroupsResponse", + "TransitionRouteGroup", + "UpdateTransitionRouteGroupRequest", + "ResourceName", + "ValidationMessage", + "CompareVersionsRequest", + "CompareVersionsResponse", + "CreateVersionOperationMetadata", + "CreateVersionRequest", + "DeleteVersionRequest", + "GetVersionRequest", + 
"ListVersionsRequest", + "ListVersionsResponse", + "LoadVersionRequest", + "UpdateVersionRequest", + "Version", + "CreateWebhookRequest", + "DeleteWebhookRequest", + "GetWebhookRequest", + "ListWebhooksRequest", + "ListWebhooksResponse", + "PageInfo", + "SessionInfo", + "UpdateWebhookRequest", + "Webhook", + "WebhookRequest", + "WebhookResponse", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py new file mode 100644 index 000000000000..6972597de6c2 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/py.typed b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/py.typed new file mode 100644 index 000000000000..e6386621b1bd --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dialogflow-cx package uses inline types. 
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py new file mode 100644 index 000000000000..ba7a88c5d5c9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py @@ -0,0 +1,544 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.agents import AgentsAsyncClient, AgentsClient +from .services.changelogs import ChangelogsAsyncClient, ChangelogsClient +from .services.deployments import DeploymentsAsyncClient, DeploymentsClient +from .services.entity_types import EntityTypesAsyncClient, EntityTypesClient +from .services.environments import EnvironmentsAsyncClient, EnvironmentsClient +from .services.experiments import ExperimentsAsyncClient, ExperimentsClient +from .services.flows import FlowsAsyncClient, FlowsClient +from .services.intents import IntentsAsyncClient, IntentsClient +from .services.pages import PagesAsyncClient, PagesClient +from .services.security_settings_service import ( + SecuritySettingsServiceAsyncClient, + SecuritySettingsServiceClient, +) +from .services.session_entity_types import ( + SessionEntityTypesAsyncClient, + SessionEntityTypesClient, +) +from .services.sessions import SessionsAsyncClient, 
SessionsClient +from .services.test_cases import TestCasesAsyncClient, TestCasesClient +from .services.transition_route_groups import ( + TransitionRouteGroupsAsyncClient, + TransitionRouteGroupsClient, +) +from .services.versions import VersionsAsyncClient, VersionsClient +from .services.webhooks import WebhooksAsyncClient, WebhooksClient +from .types.advanced_settings import AdvancedSettings +from .types.agent import ( + Agent, + AgentValidationResult, + CreateAgentRequest, + DeleteAgentRequest, + ExportAgentRequest, + ExportAgentResponse, + GetAgentRequest, + GetAgentValidationResultRequest, + GetGenerativeSettingsRequest, + ListAgentsRequest, + ListAgentsResponse, + RestoreAgentRequest, + SpeechToTextSettings, + UpdateAgentRequest, + UpdateGenerativeSettingsRequest, + ValidateAgentRequest, +) +from .types.audio_config import ( + AudioEncoding, + InputAudioConfig, + OutputAudioConfig, + OutputAudioEncoding, + SpeechModelVariant, + SpeechWordInfo, + SsmlVoiceGender, + SynthesizeSpeechConfig, + TextToSpeechSettings, + VoiceSelectionParams, +) +from .types.changelog import ( + Changelog, + GetChangelogRequest, + ListChangelogsRequest, + ListChangelogsResponse, +) +from .types.data_store_connection import DataStoreConnection, DataStoreType +from .types.deployment import ( + Deployment, + GetDeploymentRequest, + ListDeploymentsRequest, + ListDeploymentsResponse, +) +from .types.entity_type import ( + CreateEntityTypeRequest, + DeleteEntityTypeRequest, + EntityType, + GetEntityTypeRequest, + ListEntityTypesRequest, + ListEntityTypesResponse, + UpdateEntityTypeRequest, +) +from .types.environment import ( + ContinuousTestResult, + CreateEnvironmentRequest, + DeleteEnvironmentRequest, + DeployFlowMetadata, + DeployFlowRequest, + DeployFlowResponse, + Environment, + GetEnvironmentRequest, + ListContinuousTestResultsRequest, + ListContinuousTestResultsResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, + LookupEnvironmentHistoryRequest, + 
LookupEnvironmentHistoryResponse, + RunContinuousTestMetadata, + RunContinuousTestRequest, + RunContinuousTestResponse, + UpdateEnvironmentRequest, +) +from .types.experiment import ( + CreateExperimentRequest, + DeleteExperimentRequest, + Experiment, + GetExperimentRequest, + ListExperimentsRequest, + ListExperimentsResponse, + RolloutConfig, + RolloutState, + StartExperimentRequest, + StopExperimentRequest, + UpdateExperimentRequest, + VariantsHistory, + VersionVariants, +) +from .types.flow import ( + CreateFlowRequest, + DeleteFlowRequest, + ExportFlowRequest, + ExportFlowResponse, + Flow, + FlowImportStrategy, + FlowValidationResult, + GetFlowRequest, + GetFlowValidationResultRequest, + ImportFlowRequest, + ImportFlowResponse, + ListFlowsRequest, + ListFlowsResponse, + NluSettings, + TrainFlowRequest, + UpdateFlowRequest, + ValidateFlowRequest, +) +from .types.fulfillment import Fulfillment +from .types.gcs import GcsDestination +from .types.generative_settings import GenerativeSettings +from .types.import_strategy import ImportStrategy +from .types.intent import ( + CreateIntentRequest, + DeleteIntentRequest, + GetIntentRequest, + Intent, + IntentView, + ListIntentsRequest, + ListIntentsResponse, + UpdateIntentRequest, +) +from .types.page import ( + CreatePageRequest, + DeletePageRequest, + EventHandler, + Form, + GetPageRequest, + KnowledgeConnectorSettings, + ListPagesRequest, + ListPagesResponse, + Page, + TransitionRoute, + UpdatePageRequest, +) +from .types.response_message import ResponseMessage +from .types.safety_settings import SafetySettings +from .types.security_settings import ( + CreateSecuritySettingsRequest, + DeleteSecuritySettingsRequest, + GetSecuritySettingsRequest, + ListSecuritySettingsRequest, + ListSecuritySettingsResponse, + SecuritySettings, + UpdateSecuritySettingsRequest, +) +from .types.session import ( + AudioInput, + CloudConversationDebuggingInfo, + DetectIntentRequest, + DetectIntentResponse, + DtmfInput, + EventInput, + 
FulfillIntentRequest, + FulfillIntentResponse, + IntentInput, + Match, + MatchIntentRequest, + MatchIntentResponse, + QueryInput, + QueryParameters, + QueryResult, + SentimentAnalysisResult, + StreamingDetectIntentRequest, + StreamingDetectIntentResponse, + StreamingRecognitionResult, + TextInput, +) +from .types.session_entity_type import ( + CreateSessionEntityTypeRequest, + DeleteSessionEntityTypeRequest, + GetSessionEntityTypeRequest, + ListSessionEntityTypesRequest, + ListSessionEntityTypesResponse, + SessionEntityType, + UpdateSessionEntityTypeRequest, +) +from .types.test_case import ( + BatchDeleteTestCasesRequest, + BatchRunTestCasesMetadata, + BatchRunTestCasesRequest, + BatchRunTestCasesResponse, + CalculateCoverageRequest, + CalculateCoverageResponse, + ConversationTurn, + CreateTestCaseRequest, + ExportTestCasesMetadata, + ExportTestCasesRequest, + ExportTestCasesResponse, + GetTestCaseRequest, + GetTestCaseResultRequest, + ImportTestCasesMetadata, + ImportTestCasesRequest, + ImportTestCasesResponse, + IntentCoverage, + ListTestCaseResultsRequest, + ListTestCaseResultsResponse, + ListTestCasesRequest, + ListTestCasesResponse, + RunTestCaseMetadata, + RunTestCaseRequest, + RunTestCaseResponse, + TestCase, + TestCaseError, + TestCaseResult, + TestConfig, + TestError, + TestResult, + TestRunDifference, + TransitionCoverage, + TransitionRouteGroupCoverage, + UpdateTestCaseRequest, +) +from .types.transition_route_group import ( + CreateTransitionRouteGroupRequest, + DeleteTransitionRouteGroupRequest, + GetTransitionRouteGroupRequest, + ListTransitionRouteGroupsRequest, + ListTransitionRouteGroupsResponse, + TransitionRouteGroup, + UpdateTransitionRouteGroupRequest, +) +from .types.validation_message import ResourceName, ValidationMessage +from .types.version import ( + CompareVersionsRequest, + CompareVersionsResponse, + CreateVersionOperationMetadata, + CreateVersionRequest, + DeleteVersionRequest, + GetVersionRequest, + ListVersionsRequest, + 
ListVersionsResponse, + LoadVersionRequest, + UpdateVersionRequest, + Version, +) +from .types.webhook import ( + CreateWebhookRequest, + DeleteWebhookRequest, + GetWebhookRequest, + ListWebhooksRequest, + ListWebhooksResponse, + PageInfo, + SessionInfo, + UpdateWebhookRequest, + Webhook, + WebhookRequest, + WebhookResponse, +) + +__all__ = ( + "AgentsAsyncClient", + "ChangelogsAsyncClient", + "DeploymentsAsyncClient", + "EntityTypesAsyncClient", + "EnvironmentsAsyncClient", + "ExperimentsAsyncClient", + "FlowsAsyncClient", + "IntentsAsyncClient", + "PagesAsyncClient", + "SecuritySettingsServiceAsyncClient", + "SessionEntityTypesAsyncClient", + "SessionsAsyncClient", + "TestCasesAsyncClient", + "TransitionRouteGroupsAsyncClient", + "VersionsAsyncClient", + "WebhooksAsyncClient", + "AdvancedSettings", + "Agent", + "AgentValidationResult", + "AgentsClient", + "AudioEncoding", + "AudioInput", + "BatchDeleteTestCasesRequest", + "BatchRunTestCasesMetadata", + "BatchRunTestCasesRequest", + "BatchRunTestCasesResponse", + "CalculateCoverageRequest", + "CalculateCoverageResponse", + "Changelog", + "ChangelogsClient", + "CloudConversationDebuggingInfo", + "CompareVersionsRequest", + "CompareVersionsResponse", + "ContinuousTestResult", + "ConversationTurn", + "CreateAgentRequest", + "CreateEntityTypeRequest", + "CreateEnvironmentRequest", + "CreateExperimentRequest", + "CreateFlowRequest", + "CreateIntentRequest", + "CreatePageRequest", + "CreateSecuritySettingsRequest", + "CreateSessionEntityTypeRequest", + "CreateTestCaseRequest", + "CreateTransitionRouteGroupRequest", + "CreateVersionOperationMetadata", + "CreateVersionRequest", + "CreateWebhookRequest", + "DataStoreConnection", + "DataStoreType", + "DeleteAgentRequest", + "DeleteEntityTypeRequest", + "DeleteEnvironmentRequest", + "DeleteExperimentRequest", + "DeleteFlowRequest", + "DeleteIntentRequest", + "DeletePageRequest", + "DeleteSecuritySettingsRequest", + "DeleteSessionEntityTypeRequest", + 
"DeleteTransitionRouteGroupRequest", + "DeleteVersionRequest", + "DeleteWebhookRequest", + "DeployFlowMetadata", + "DeployFlowRequest", + "DeployFlowResponse", + "Deployment", + "DeploymentsClient", + "DetectIntentRequest", + "DetectIntentResponse", + "DtmfInput", + "EntityType", + "EntityTypesClient", + "Environment", + "EnvironmentsClient", + "EventHandler", + "EventInput", + "Experiment", + "ExperimentsClient", + "ExportAgentRequest", + "ExportAgentResponse", + "ExportFlowRequest", + "ExportFlowResponse", + "ExportTestCasesMetadata", + "ExportTestCasesRequest", + "ExportTestCasesResponse", + "Flow", + "FlowImportStrategy", + "FlowValidationResult", + "FlowsClient", + "Form", + "FulfillIntentRequest", + "FulfillIntentResponse", + "Fulfillment", + "GcsDestination", + "GenerativeSettings", + "GetAgentRequest", + "GetAgentValidationResultRequest", + "GetChangelogRequest", + "GetDeploymentRequest", + "GetEntityTypeRequest", + "GetEnvironmentRequest", + "GetExperimentRequest", + "GetFlowRequest", + "GetFlowValidationResultRequest", + "GetGenerativeSettingsRequest", + "GetIntentRequest", + "GetPageRequest", + "GetSecuritySettingsRequest", + "GetSessionEntityTypeRequest", + "GetTestCaseRequest", + "GetTestCaseResultRequest", + "GetTransitionRouteGroupRequest", + "GetVersionRequest", + "GetWebhookRequest", + "ImportFlowRequest", + "ImportFlowResponse", + "ImportStrategy", + "ImportTestCasesMetadata", + "ImportTestCasesRequest", + "ImportTestCasesResponse", + "InputAudioConfig", + "Intent", + "IntentCoverage", + "IntentInput", + "IntentView", + "IntentsClient", + "KnowledgeConnectorSettings", + "ListAgentsRequest", + "ListAgentsResponse", + "ListChangelogsRequest", + "ListChangelogsResponse", + "ListContinuousTestResultsRequest", + "ListContinuousTestResultsResponse", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ListEntityTypesRequest", + "ListEntityTypesResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "ListExperimentsRequest", + 
"ListExperimentsResponse", + "ListFlowsRequest", + "ListFlowsResponse", + "ListIntentsRequest", + "ListIntentsResponse", + "ListPagesRequest", + "ListPagesResponse", + "ListSecuritySettingsRequest", + "ListSecuritySettingsResponse", + "ListSessionEntityTypesRequest", + "ListSessionEntityTypesResponse", + "ListTestCaseResultsRequest", + "ListTestCaseResultsResponse", + "ListTestCasesRequest", + "ListTestCasesResponse", + "ListTransitionRouteGroupsRequest", + "ListTransitionRouteGroupsResponse", + "ListVersionsRequest", + "ListVersionsResponse", + "ListWebhooksRequest", + "ListWebhooksResponse", + "LoadVersionRequest", + "LookupEnvironmentHistoryRequest", + "LookupEnvironmentHistoryResponse", + "Match", + "MatchIntentRequest", + "MatchIntentResponse", + "NluSettings", + "OutputAudioConfig", + "OutputAudioEncoding", + "Page", + "PageInfo", + "PagesClient", + "QueryInput", + "QueryParameters", + "QueryResult", + "ResourceName", + "ResponseMessage", + "RestoreAgentRequest", + "RolloutConfig", + "RolloutState", + "RunContinuousTestMetadata", + "RunContinuousTestRequest", + "RunContinuousTestResponse", + "RunTestCaseMetadata", + "RunTestCaseRequest", + "RunTestCaseResponse", + "SafetySettings", + "SecuritySettings", + "SecuritySettingsServiceClient", + "SentimentAnalysisResult", + "SessionEntityType", + "SessionEntityTypesClient", + "SessionInfo", + "SessionsClient", + "SpeechModelVariant", + "SpeechToTextSettings", + "SpeechWordInfo", + "SsmlVoiceGender", + "StartExperimentRequest", + "StopExperimentRequest", + "StreamingDetectIntentRequest", + "StreamingDetectIntentResponse", + "StreamingRecognitionResult", + "SynthesizeSpeechConfig", + "TestCase", + "TestCaseError", + "TestCaseResult", + "TestCasesClient", + "TestConfig", + "TestError", + "TestResult", + "TestRunDifference", + "TextInput", + "TextToSpeechSettings", + "TrainFlowRequest", + "TransitionCoverage", + "TransitionRoute", + "TransitionRouteGroup", + "TransitionRouteGroupCoverage", + 
"TransitionRouteGroupsClient", + "UpdateAgentRequest", + "UpdateEntityTypeRequest", + "UpdateEnvironmentRequest", + "UpdateExperimentRequest", + "UpdateFlowRequest", + "UpdateGenerativeSettingsRequest", + "UpdateIntentRequest", + "UpdatePageRequest", + "UpdateSecuritySettingsRequest", + "UpdateSessionEntityTypeRequest", + "UpdateTestCaseRequest", + "UpdateTransitionRouteGroupRequest", + "UpdateVersionRequest", + "UpdateWebhookRequest", + "ValidateAgentRequest", + "ValidateFlowRequest", + "ValidationMessage", + "VariantsHistory", + "Version", + "VersionVariants", + "VersionsClient", + "VoiceSelectionParams", + "Webhook", + "WebhookRequest", + "WebhookResponse", + "WebhooksClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json new file mode 100644 index 000000000000..17c042151b36 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json @@ -0,0 +1,1798 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dialogflowcx_v3", + "protoPackage": "google.cloud.dialogflow.cx.v3", + "schema": "1.0", + "services": { + "Agents": { + "clients": { + "grpc": { + "libraryClient": "AgentsClient", + "rpcs": { + "CreateAgent": { + "methods": [ + "create_agent" + ] + }, + "DeleteAgent": { + "methods": [ + "delete_agent" + ] + }, + "ExportAgent": { + "methods": [ + "export_agent" + ] + }, + "GetAgent": { + "methods": [ + "get_agent" + ] + }, + "GetAgentValidationResult": { + "methods": [ + "get_agent_validation_result" + ] + }, + "GetGenerativeSettings": { + "methods": [ + "get_generative_settings" + ] + }, + "ListAgents": { + "methods": [ + "list_agents" + ] + }, + "RestoreAgent": { + "methods": [ + "restore_agent" + ] + }, + "UpdateAgent": { + "methods": [ + "update_agent" + ] + }, + 
"UpdateGenerativeSettings": { + "methods": [ + "update_generative_settings" + ] + }, + "ValidateAgent": { + "methods": [ + "validate_agent" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AgentsAsyncClient", + "rpcs": { + "CreateAgent": { + "methods": [ + "create_agent" + ] + }, + "DeleteAgent": { + "methods": [ + "delete_agent" + ] + }, + "ExportAgent": { + "methods": [ + "export_agent" + ] + }, + "GetAgent": { + "methods": [ + "get_agent" + ] + }, + "GetAgentValidationResult": { + "methods": [ + "get_agent_validation_result" + ] + }, + "GetGenerativeSettings": { + "methods": [ + "get_generative_settings" + ] + }, + "ListAgents": { + "methods": [ + "list_agents" + ] + }, + "RestoreAgent": { + "methods": [ + "restore_agent" + ] + }, + "UpdateAgent": { + "methods": [ + "update_agent" + ] + }, + "UpdateGenerativeSettings": { + "methods": [ + "update_generative_settings" + ] + }, + "ValidateAgent": { + "methods": [ + "validate_agent" + ] + } + } + }, + "rest": { + "libraryClient": "AgentsClient", + "rpcs": { + "CreateAgent": { + "methods": [ + "create_agent" + ] + }, + "DeleteAgent": { + "methods": [ + "delete_agent" + ] + }, + "ExportAgent": { + "methods": [ + "export_agent" + ] + }, + "GetAgent": { + "methods": [ + "get_agent" + ] + }, + "GetAgentValidationResult": { + "methods": [ + "get_agent_validation_result" + ] + }, + "GetGenerativeSettings": { + "methods": [ + "get_generative_settings" + ] + }, + "ListAgents": { + "methods": [ + "list_agents" + ] + }, + "RestoreAgent": { + "methods": [ + "restore_agent" + ] + }, + "UpdateAgent": { + "methods": [ + "update_agent" + ] + }, + "UpdateGenerativeSettings": { + "methods": [ + "update_generative_settings" + ] + }, + "ValidateAgent": { + "methods": [ + "validate_agent" + ] + } + } + } + } + }, + "Changelogs": { + "clients": { + "grpc": { + "libraryClient": "ChangelogsClient", + "rpcs": { + "GetChangelog": { + "methods": [ + "get_changelog" + ] + }, + "ListChangelogs": { + "methods": [ + "list_changelogs" + ] + 
} + } + }, + "grpc-async": { + "libraryClient": "ChangelogsAsyncClient", + "rpcs": { + "GetChangelog": { + "methods": [ + "get_changelog" + ] + }, + "ListChangelogs": { + "methods": [ + "list_changelogs" + ] + } + } + }, + "rest": { + "libraryClient": "ChangelogsClient", + "rpcs": { + "GetChangelog": { + "methods": [ + "get_changelog" + ] + }, + "ListChangelogs": { + "methods": [ + "list_changelogs" + ] + } + } + } + } + }, + "Deployments": { + "clients": { + "grpc": { + "libraryClient": "DeploymentsClient", + "rpcs": { + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DeploymentsAsyncClient", + "rpcs": { + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + } + } + }, + "rest": { + "libraryClient": "DeploymentsClient", + "rpcs": { + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + } + } + } + } + }, + "EntityTypes": { + "clients": { + "grpc": { + "libraryClient": "EntityTypesClient", + "rpcs": { + "CreateEntityType": { + "methods": [ + "create_entity_type" + ] + }, + "DeleteEntityType": { + "methods": [ + "delete_entity_type" + ] + }, + "GetEntityType": { + "methods": [ + "get_entity_type" + ] + }, + "ListEntityTypes": { + "methods": [ + "list_entity_types" + ] + }, + "UpdateEntityType": { + "methods": [ + "update_entity_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EntityTypesAsyncClient", + "rpcs": { + "CreateEntityType": { + "methods": [ + "create_entity_type" + ] + }, + "DeleteEntityType": { + "methods": [ + "delete_entity_type" + ] + }, + "GetEntityType": { + "methods": [ + "get_entity_type" + ] + }, + "ListEntityTypes": { + "methods": [ + "list_entity_types" + ] + }, + "UpdateEntityType": { + "methods": [ + "update_entity_type" + ] + } + } + }, + "rest": { + 
"libraryClient": "EntityTypesClient", + "rpcs": { + "CreateEntityType": { + "methods": [ + "create_entity_type" + ] + }, + "DeleteEntityType": { + "methods": [ + "delete_entity_type" + ] + }, + "GetEntityType": { + "methods": [ + "get_entity_type" + ] + }, + "ListEntityTypes": { + "methods": [ + "list_entity_types" + ] + }, + "UpdateEntityType": { + "methods": [ + "update_entity_type" + ] + } + } + } + } + }, + "Environments": { + "clients": { + "grpc": { + "libraryClient": "EnvironmentsClient", + "rpcs": { + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeployFlow": { + "methods": [ + "deploy_flow" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "ListContinuousTestResults": { + "methods": [ + "list_continuous_test_results" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "LookupEnvironmentHistory": { + "methods": [ + "lookup_environment_history" + ] + }, + "RunContinuousTest": { + "methods": [ + "run_continuous_test" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EnvironmentsAsyncClient", + "rpcs": { + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeployFlow": { + "methods": [ + "deploy_flow" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "ListContinuousTestResults": { + "methods": [ + "list_continuous_test_results" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "LookupEnvironmentHistory": { + "methods": [ + "lookup_environment_history" + ] + }, + "RunContinuousTest": { + "methods": [ + "run_continuous_test" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + } + } + }, + "rest": { + "libraryClient": "EnvironmentsClient", + "rpcs": { + 
"CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, + "DeployFlow": { + "methods": [ + "deploy_flow" + ] + }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, + "ListContinuousTestResults": { + "methods": [ + "list_continuous_test_results" + ] + }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, + "LookupEnvironmentHistory": { + "methods": [ + "lookup_environment_history" + ] + }, + "RunContinuousTest": { + "methods": [ + "run_continuous_test" + ] + }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + } + } + } + } + }, + "Experiments": { + "clients": { + "grpc": { + "libraryClient": "ExperimentsClient", + "rpcs": { + "CreateExperiment": { + "methods": [ + "create_experiment" + ] + }, + "DeleteExperiment": { + "methods": [ + "delete_experiment" + ] + }, + "GetExperiment": { + "methods": [ + "get_experiment" + ] + }, + "ListExperiments": { + "methods": [ + "list_experiments" + ] + }, + "StartExperiment": { + "methods": [ + "start_experiment" + ] + }, + "StopExperiment": { + "methods": [ + "stop_experiment" + ] + }, + "UpdateExperiment": { + "methods": [ + "update_experiment" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ExperimentsAsyncClient", + "rpcs": { + "CreateExperiment": { + "methods": [ + "create_experiment" + ] + }, + "DeleteExperiment": { + "methods": [ + "delete_experiment" + ] + }, + "GetExperiment": { + "methods": [ + "get_experiment" + ] + }, + "ListExperiments": { + "methods": [ + "list_experiments" + ] + }, + "StartExperiment": { + "methods": [ + "start_experiment" + ] + }, + "StopExperiment": { + "methods": [ + "stop_experiment" + ] + }, + "UpdateExperiment": { + "methods": [ + "update_experiment" + ] + } + } + }, + "rest": { + "libraryClient": "ExperimentsClient", + "rpcs": { + "CreateExperiment": { + "methods": [ + "create_experiment" + ] + }, + "DeleteExperiment": { + "methods": [ + 
"delete_experiment" + ] + }, + "GetExperiment": { + "methods": [ + "get_experiment" + ] + }, + "ListExperiments": { + "methods": [ + "list_experiments" + ] + }, + "StartExperiment": { + "methods": [ + "start_experiment" + ] + }, + "StopExperiment": { + "methods": [ + "stop_experiment" + ] + }, + "UpdateExperiment": { + "methods": [ + "update_experiment" + ] + } + } + } + } + }, + "Flows": { + "clients": { + "grpc": { + "libraryClient": "FlowsClient", + "rpcs": { + "CreateFlow": { + "methods": [ + "create_flow" + ] + }, + "DeleteFlow": { + "methods": [ + "delete_flow" + ] + }, + "ExportFlow": { + "methods": [ + "export_flow" + ] + }, + "GetFlow": { + "methods": [ + "get_flow" + ] + }, + "GetFlowValidationResult": { + "methods": [ + "get_flow_validation_result" + ] + }, + "ImportFlow": { + "methods": [ + "import_flow" + ] + }, + "ListFlows": { + "methods": [ + "list_flows" + ] + }, + "TrainFlow": { + "methods": [ + "train_flow" + ] + }, + "UpdateFlow": { + "methods": [ + "update_flow" + ] + }, + "ValidateFlow": { + "methods": [ + "validate_flow" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FlowsAsyncClient", + "rpcs": { + "CreateFlow": { + "methods": [ + "create_flow" + ] + }, + "DeleteFlow": { + "methods": [ + "delete_flow" + ] + }, + "ExportFlow": { + "methods": [ + "export_flow" + ] + }, + "GetFlow": { + "methods": [ + "get_flow" + ] + }, + "GetFlowValidationResult": { + "methods": [ + "get_flow_validation_result" + ] + }, + "ImportFlow": { + "methods": [ + "import_flow" + ] + }, + "ListFlows": { + "methods": [ + "list_flows" + ] + }, + "TrainFlow": { + "methods": [ + "train_flow" + ] + }, + "UpdateFlow": { + "methods": [ + "update_flow" + ] + }, + "ValidateFlow": { + "methods": [ + "validate_flow" + ] + } + } + }, + "rest": { + "libraryClient": "FlowsClient", + "rpcs": { + "CreateFlow": { + "methods": [ + "create_flow" + ] + }, + "DeleteFlow": { + "methods": [ + "delete_flow" + ] + }, + "ExportFlow": { + "methods": [ + "export_flow" + ] + }, + 
"GetFlow": { + "methods": [ + "get_flow" + ] + }, + "GetFlowValidationResult": { + "methods": [ + "get_flow_validation_result" + ] + }, + "ImportFlow": { + "methods": [ + "import_flow" + ] + }, + "ListFlows": { + "methods": [ + "list_flows" + ] + }, + "TrainFlow": { + "methods": [ + "train_flow" + ] + }, + "UpdateFlow": { + "methods": [ + "update_flow" + ] + }, + "ValidateFlow": { + "methods": [ + "validate_flow" + ] + } + } + } + } + }, + "Intents": { + "clients": { + "grpc": { + "libraryClient": "IntentsClient", + "rpcs": { + "CreateIntent": { + "methods": [ + "create_intent" + ] + }, + "DeleteIntent": { + "methods": [ + "delete_intent" + ] + }, + "GetIntent": { + "methods": [ + "get_intent" + ] + }, + "ListIntents": { + "methods": [ + "list_intents" + ] + }, + "UpdateIntent": { + "methods": [ + "update_intent" + ] + } + } + }, + "grpc-async": { + "libraryClient": "IntentsAsyncClient", + "rpcs": { + "CreateIntent": { + "methods": [ + "create_intent" + ] + }, + "DeleteIntent": { + "methods": [ + "delete_intent" + ] + }, + "GetIntent": { + "methods": [ + "get_intent" + ] + }, + "ListIntents": { + "methods": [ + "list_intents" + ] + }, + "UpdateIntent": { + "methods": [ + "update_intent" + ] + } + } + }, + "rest": { + "libraryClient": "IntentsClient", + "rpcs": { + "CreateIntent": { + "methods": [ + "create_intent" + ] + }, + "DeleteIntent": { + "methods": [ + "delete_intent" + ] + }, + "GetIntent": { + "methods": [ + "get_intent" + ] + }, + "ListIntents": { + "methods": [ + "list_intents" + ] + }, + "UpdateIntent": { + "methods": [ + "update_intent" + ] + } + } + } + } + }, + "Pages": { + "clients": { + "grpc": { + "libraryClient": "PagesClient", + "rpcs": { + "CreatePage": { + "methods": [ + "create_page" + ] + }, + "DeletePage": { + "methods": [ + "delete_page" + ] + }, + "GetPage": { + "methods": [ + "get_page" + ] + }, + "ListPages": { + "methods": [ + "list_pages" + ] + }, + "UpdatePage": { + "methods": [ + "update_page" + ] + } + } + }, + "grpc-async": { + 
"libraryClient": "PagesAsyncClient", + "rpcs": { + "CreatePage": { + "methods": [ + "create_page" + ] + }, + "DeletePage": { + "methods": [ + "delete_page" + ] + }, + "GetPage": { + "methods": [ + "get_page" + ] + }, + "ListPages": { + "methods": [ + "list_pages" + ] + }, + "UpdatePage": { + "methods": [ + "update_page" + ] + } + } + }, + "rest": { + "libraryClient": "PagesClient", + "rpcs": { + "CreatePage": { + "methods": [ + "create_page" + ] + }, + "DeletePage": { + "methods": [ + "delete_page" + ] + }, + "GetPage": { + "methods": [ + "get_page" + ] + }, + "ListPages": { + "methods": [ + "list_pages" + ] + }, + "UpdatePage": { + "methods": [ + "update_page" + ] + } + } + } + } + }, + "SecuritySettingsService": { + "clients": { + "grpc": { + "libraryClient": "SecuritySettingsServiceClient", + "rpcs": { + "CreateSecuritySettings": { + "methods": [ + "create_security_settings" + ] + }, + "DeleteSecuritySettings": { + "methods": [ + "delete_security_settings" + ] + }, + "GetSecuritySettings": { + "methods": [ + "get_security_settings" + ] + }, + "ListSecuritySettings": { + "methods": [ + "list_security_settings" + ] + }, + "UpdateSecuritySettings": { + "methods": [ + "update_security_settings" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SecuritySettingsServiceAsyncClient", + "rpcs": { + "CreateSecuritySettings": { + "methods": [ + "create_security_settings" + ] + }, + "DeleteSecuritySettings": { + "methods": [ + "delete_security_settings" + ] + }, + "GetSecuritySettings": { + "methods": [ + "get_security_settings" + ] + }, + "ListSecuritySettings": { + "methods": [ + "list_security_settings" + ] + }, + "UpdateSecuritySettings": { + "methods": [ + "update_security_settings" + ] + } + } + }, + "rest": { + "libraryClient": "SecuritySettingsServiceClient", + "rpcs": { + "CreateSecuritySettings": { + "methods": [ + "create_security_settings" + ] + }, + "DeleteSecuritySettings": { + "methods": [ + "delete_security_settings" + ] + }, + "GetSecuritySettings": { 
+ "methods": [ + "get_security_settings" + ] + }, + "ListSecuritySettings": { + "methods": [ + "list_security_settings" + ] + }, + "UpdateSecuritySettings": { + "methods": [ + "update_security_settings" + ] + } + } + } + } + }, + "SessionEntityTypes": { + "clients": { + "grpc": { + "libraryClient": "SessionEntityTypesClient", + "rpcs": { + "CreateSessionEntityType": { + "methods": [ + "create_session_entity_type" + ] + }, + "DeleteSessionEntityType": { + "methods": [ + "delete_session_entity_type" + ] + }, + "GetSessionEntityType": { + "methods": [ + "get_session_entity_type" + ] + }, + "ListSessionEntityTypes": { + "methods": [ + "list_session_entity_types" + ] + }, + "UpdateSessionEntityType": { + "methods": [ + "update_session_entity_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SessionEntityTypesAsyncClient", + "rpcs": { + "CreateSessionEntityType": { + "methods": [ + "create_session_entity_type" + ] + }, + "DeleteSessionEntityType": { + "methods": [ + "delete_session_entity_type" + ] + }, + "GetSessionEntityType": { + "methods": [ + "get_session_entity_type" + ] + }, + "ListSessionEntityTypes": { + "methods": [ + "list_session_entity_types" + ] + }, + "UpdateSessionEntityType": { + "methods": [ + "update_session_entity_type" + ] + } + } + }, + "rest": { + "libraryClient": "SessionEntityTypesClient", + "rpcs": { + "CreateSessionEntityType": { + "methods": [ + "create_session_entity_type" + ] + }, + "DeleteSessionEntityType": { + "methods": [ + "delete_session_entity_type" + ] + }, + "GetSessionEntityType": { + "methods": [ + "get_session_entity_type" + ] + }, + "ListSessionEntityTypes": { + "methods": [ + "list_session_entity_types" + ] + }, + "UpdateSessionEntityType": { + "methods": [ + "update_session_entity_type" + ] + } + } + } + } + }, + "Sessions": { + "clients": { + "grpc": { + "libraryClient": "SessionsClient", + "rpcs": { + "DetectIntent": { + "methods": [ + "detect_intent" + ] + }, + "FulfillIntent": { + "methods": [ + "fulfill_intent" 
+ ] + }, + "MatchIntent": { + "methods": [ + "match_intent" + ] + }, + "StreamingDetectIntent": { + "methods": [ + "streaming_detect_intent" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SessionsAsyncClient", + "rpcs": { + "DetectIntent": { + "methods": [ + "detect_intent" + ] + }, + "FulfillIntent": { + "methods": [ + "fulfill_intent" + ] + }, + "MatchIntent": { + "methods": [ + "match_intent" + ] + }, + "StreamingDetectIntent": { + "methods": [ + "streaming_detect_intent" + ] + } + } + }, + "rest": { + "libraryClient": "SessionsClient", + "rpcs": { + "DetectIntent": { + "methods": [ + "detect_intent" + ] + }, + "FulfillIntent": { + "methods": [ + "fulfill_intent" + ] + }, + "MatchIntent": { + "methods": [ + "match_intent" + ] + }, + "StreamingDetectIntent": { + "methods": [ + "streaming_detect_intent" + ] + } + } + } + } + }, + "TestCases": { + "clients": { + "grpc": { + "libraryClient": "TestCasesClient", + "rpcs": { + "BatchDeleteTestCases": { + "methods": [ + "batch_delete_test_cases" + ] + }, + "BatchRunTestCases": { + "methods": [ + "batch_run_test_cases" + ] + }, + "CalculateCoverage": { + "methods": [ + "calculate_coverage" + ] + }, + "CreateTestCase": { + "methods": [ + "create_test_case" + ] + }, + "ExportTestCases": { + "methods": [ + "export_test_cases" + ] + }, + "GetTestCase": { + "methods": [ + "get_test_case" + ] + }, + "GetTestCaseResult": { + "methods": [ + "get_test_case_result" + ] + }, + "ImportTestCases": { + "methods": [ + "import_test_cases" + ] + }, + "ListTestCaseResults": { + "methods": [ + "list_test_case_results" + ] + }, + "ListTestCases": { + "methods": [ + "list_test_cases" + ] + }, + "RunTestCase": { + "methods": [ + "run_test_case" + ] + }, + "UpdateTestCase": { + "methods": [ + "update_test_case" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TestCasesAsyncClient", + "rpcs": { + "BatchDeleteTestCases": { + "methods": [ + "batch_delete_test_cases" + ] + }, + "BatchRunTestCases": { + "methods": [ + 
"batch_run_test_cases" + ] + }, + "CalculateCoverage": { + "methods": [ + "calculate_coverage" + ] + }, + "CreateTestCase": { + "methods": [ + "create_test_case" + ] + }, + "ExportTestCases": { + "methods": [ + "export_test_cases" + ] + }, + "GetTestCase": { + "methods": [ + "get_test_case" + ] + }, + "GetTestCaseResult": { + "methods": [ + "get_test_case_result" + ] + }, + "ImportTestCases": { + "methods": [ + "import_test_cases" + ] + }, + "ListTestCaseResults": { + "methods": [ + "list_test_case_results" + ] + }, + "ListTestCases": { + "methods": [ + "list_test_cases" + ] + }, + "RunTestCase": { + "methods": [ + "run_test_case" + ] + }, + "UpdateTestCase": { + "methods": [ + "update_test_case" + ] + } + } + }, + "rest": { + "libraryClient": "TestCasesClient", + "rpcs": { + "BatchDeleteTestCases": { + "methods": [ + "batch_delete_test_cases" + ] + }, + "BatchRunTestCases": { + "methods": [ + "batch_run_test_cases" + ] + }, + "CalculateCoverage": { + "methods": [ + "calculate_coverage" + ] + }, + "CreateTestCase": { + "methods": [ + "create_test_case" + ] + }, + "ExportTestCases": { + "methods": [ + "export_test_cases" + ] + }, + "GetTestCase": { + "methods": [ + "get_test_case" + ] + }, + "GetTestCaseResult": { + "methods": [ + "get_test_case_result" + ] + }, + "ImportTestCases": { + "methods": [ + "import_test_cases" + ] + }, + "ListTestCaseResults": { + "methods": [ + "list_test_case_results" + ] + }, + "ListTestCases": { + "methods": [ + "list_test_cases" + ] + }, + "RunTestCase": { + "methods": [ + "run_test_case" + ] + }, + "UpdateTestCase": { + "methods": [ + "update_test_case" + ] + } + } + } + } + }, + "TransitionRouteGroups": { + "clients": { + "grpc": { + "libraryClient": "TransitionRouteGroupsClient", + "rpcs": { + "CreateTransitionRouteGroup": { + "methods": [ + "create_transition_route_group" + ] + }, + "DeleteTransitionRouteGroup": { + "methods": [ + "delete_transition_route_group" + ] + }, + "GetTransitionRouteGroup": { + "methods": [ + 
"get_transition_route_group" + ] + }, + "ListTransitionRouteGroups": { + "methods": [ + "list_transition_route_groups" + ] + }, + "UpdateTransitionRouteGroup": { + "methods": [ + "update_transition_route_group" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TransitionRouteGroupsAsyncClient", + "rpcs": { + "CreateTransitionRouteGroup": { + "methods": [ + "create_transition_route_group" + ] + }, + "DeleteTransitionRouteGroup": { + "methods": [ + "delete_transition_route_group" + ] + }, + "GetTransitionRouteGroup": { + "methods": [ + "get_transition_route_group" + ] + }, + "ListTransitionRouteGroups": { + "methods": [ + "list_transition_route_groups" + ] + }, + "UpdateTransitionRouteGroup": { + "methods": [ + "update_transition_route_group" + ] + } + } + }, + "rest": { + "libraryClient": "TransitionRouteGroupsClient", + "rpcs": { + "CreateTransitionRouteGroup": { + "methods": [ + "create_transition_route_group" + ] + }, + "DeleteTransitionRouteGroup": { + "methods": [ + "delete_transition_route_group" + ] + }, + "GetTransitionRouteGroup": { + "methods": [ + "get_transition_route_group" + ] + }, + "ListTransitionRouteGroups": { + "methods": [ + "list_transition_route_groups" + ] + }, + "UpdateTransitionRouteGroup": { + "methods": [ + "update_transition_route_group" + ] + } + } + } + } + }, + "Versions": { + "clients": { + "grpc": { + "libraryClient": "VersionsClient", + "rpcs": { + "CompareVersions": { + "methods": [ + "compare_versions" + ] + }, + "CreateVersion": { + "methods": [ + "create_version" + ] + }, + "DeleteVersion": { + "methods": [ + "delete_version" + ] + }, + "GetVersion": { + "methods": [ + "get_version" + ] + }, + "ListVersions": { + "methods": [ + "list_versions" + ] + }, + "LoadVersion": { + "methods": [ + "load_version" + ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] + } + } + }, + "grpc-async": { + "libraryClient": "VersionsAsyncClient", + "rpcs": { + "CompareVersions": { + "methods": [ + "compare_versions" + ] + }, + 
"CreateVersion": { + "methods": [ + "create_version" + ] + }, + "DeleteVersion": { + "methods": [ + "delete_version" + ] + }, + "GetVersion": { + "methods": [ + "get_version" + ] + }, + "ListVersions": { + "methods": [ + "list_versions" + ] + }, + "LoadVersion": { + "methods": [ + "load_version" + ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] + } + } + }, + "rest": { + "libraryClient": "VersionsClient", + "rpcs": { + "CompareVersions": { + "methods": [ + "compare_versions" + ] + }, + "CreateVersion": { + "methods": [ + "create_version" + ] + }, + "DeleteVersion": { + "methods": [ + "delete_version" + ] + }, + "GetVersion": { + "methods": [ + "get_version" + ] + }, + "ListVersions": { + "methods": [ + "list_versions" + ] + }, + "LoadVersion": { + "methods": [ + "load_version" + ] + }, + "UpdateVersion": { + "methods": [ + "update_version" + ] + } + } + } + } + }, + "Webhooks": { + "clients": { + "grpc": { + "libraryClient": "WebhooksClient", + "rpcs": { + "CreateWebhook": { + "methods": [ + "create_webhook" + ] + }, + "DeleteWebhook": { + "methods": [ + "delete_webhook" + ] + }, + "GetWebhook": { + "methods": [ + "get_webhook" + ] + }, + "ListWebhooks": { + "methods": [ + "list_webhooks" + ] + }, + "UpdateWebhook": { + "methods": [ + "update_webhook" + ] + } + } + }, + "grpc-async": { + "libraryClient": "WebhooksAsyncClient", + "rpcs": { + "CreateWebhook": { + "methods": [ + "create_webhook" + ] + }, + "DeleteWebhook": { + "methods": [ + "delete_webhook" + ] + }, + "GetWebhook": { + "methods": [ + "get_webhook" + ] + }, + "ListWebhooks": { + "methods": [ + "list_webhooks" + ] + }, + "UpdateWebhook": { + "methods": [ + "update_webhook" + ] + } + } + }, + "rest": { + "libraryClient": "WebhooksClient", + "rpcs": { + "CreateWebhook": { + "methods": [ + "create_webhook" + ] + }, + "DeleteWebhook": { + "methods": [ + "delete_webhook" + ] + }, + "GetWebhook": { + "methods": [ + "get_webhook" + ] + }, + "ListWebhooks": { + "methods": [ + "list_webhooks" 
+ ] + }, + "UpdateWebhook": { + "methods": [ + "update_webhook" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py new file mode 100644 index 000000000000..6972597de6c2 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/py.typed b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/py.typed new file mode 100644 index 000000000000..e6386621b1bd --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dialogflow-cx package uses inline types. 
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/__init__.py new file mode 100644 index 000000000000..0ea2bfb731ad --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AgentsAsyncClient +from .client import AgentsClient + +__all__ = ( + "AgentsClient", + "AgentsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py new file mode 100644 index 000000000000..3aacba12c453 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/async_client.py @@ -0,0 +1,1765 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.agents import pagers +from google.cloud.dialogflowcx_v3.types import ( + generative_settings as gcdc_generative_settings, +) +from google.cloud.dialogflowcx_v3.types import advanced_settings +from google.cloud.dialogflowcx_v3.types import agent +from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent +from google.cloud.dialogflowcx_v3.types import audio_config, flow +from google.cloud.dialogflowcx_v3.types import generative_settings +from google.cloud.dialogflowcx_v3.types import safety_settings + +from .client import AgentsClient +from .transports.base import DEFAULT_CLIENT_INFO, AgentsTransport +from .transports.grpc_asyncio import AgentsGrpcAsyncIOTransport + + +class AgentsAsyncClient: + """Service for managing 
[Agents][google.cloud.dialogflow.cx.v3.Agent].""" + + _client: AgentsClient + + DEFAULT_ENDPOINT = AgentsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AgentsClient.DEFAULT_MTLS_ENDPOINT + + agent_path = staticmethod(AgentsClient.agent_path) + parse_agent_path = staticmethod(AgentsClient.parse_agent_path) + agent_generative_settings_path = staticmethod( + AgentsClient.agent_generative_settings_path + ) + parse_agent_generative_settings_path = staticmethod( + AgentsClient.parse_agent_generative_settings_path + ) + agent_validation_result_path = staticmethod( + AgentsClient.agent_validation_result_path + ) + parse_agent_validation_result_path = staticmethod( + AgentsClient.parse_agent_validation_result_path + ) + environment_path = staticmethod(AgentsClient.environment_path) + parse_environment_path = staticmethod(AgentsClient.parse_environment_path) + flow_path = staticmethod(AgentsClient.flow_path) + parse_flow_path = staticmethod(AgentsClient.parse_flow_path) + flow_validation_result_path = staticmethod(AgentsClient.flow_validation_result_path) + parse_flow_validation_result_path = staticmethod( + AgentsClient.parse_flow_validation_result_path + ) + security_settings_path = staticmethod(AgentsClient.security_settings_path) + parse_security_settings_path = staticmethod( + AgentsClient.parse_security_settings_path + ) + common_billing_account_path = staticmethod(AgentsClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + AgentsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AgentsClient.common_folder_path) + parse_common_folder_path = staticmethod(AgentsClient.parse_common_folder_path) + common_organization_path = staticmethod(AgentsClient.common_organization_path) + parse_common_organization_path = staticmethod( + AgentsClient.parse_common_organization_path + ) + common_project_path = staticmethod(AgentsClient.common_project_path) + parse_common_project_path = 
staticmethod(AgentsClient.parse_common_project_path) + common_location_path = staticmethod(AgentsClient.common_location_path) + parse_common_location_path = staticmethod(AgentsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AgentsAsyncClient: The constructed client. + """ + return AgentsClient.from_service_account_info.__func__(AgentsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AgentsAsyncClient: The constructed client. + """ + return AgentsClient.from_service_account_file.__func__(AgentsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AgentsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AgentsTransport: + """Returns the transport used by the client instance. + + Returns: + AgentsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AgentsClient).get_transport_class, type(AgentsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AgentsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the agents client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.AgentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AgentsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_agents( + self, + request: Optional[Union[agent.ListAgentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAgentsAsyncPager: + r"""Returns the list of all agents in the specified + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_agents(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListAgentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_agents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListAgentsRequest, dict]]): + The request object. The request message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + parent (:class:`str`): + Required. The location to list all agents for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.agents.pagers.ListAgentsAsyncPager: + The response message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = agent.ListAgentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_agents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAgentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_agent( + self, + request: Optional[Union[agent.GetAgentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.Agent: + r"""Retrieves the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetAgentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetAgentRequest, dict]]): + The request object. The request message for + [Agents.GetAgent][google.cloud.dialogflow.cx.v3.Agents.GetAgent]. + name (:class:`str`): + Required. The name of the agent. Format: + ``projects//locations//agents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Agent: + Agents are best described as Natural Language Understanding (NLU) modules + that transform user requests into actionable data. + You can include agents in your app, product, or + service to determine user intent and respond to the + user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], + [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = agent.GetAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_agent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_agent( + self, + request: Optional[Union[gcdc_agent.CreateAgentRequest, dict]] = None, + *, + parent: Optional[str] = None, + agent: Optional[gcdc_agent.Agent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_agent.Agent: + r"""Creates an agent in the specified location. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + agent = dialogflowcx_v3.Agent() + agent.display_name = "display_name_value" + agent.default_language_code = "default_language_code_value" + agent.time_zone = "time_zone_value" + + request = dialogflowcx_v3.CreateAgentRequest( + parent="parent_value", + agent=agent, + ) + + # Make the request + response = await client.create_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateAgentRequest, dict]]): + The request object. The request message for + [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent]. + parent (:class:`str`): + Required. The location to create a agent for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + agent (:class:`google.cloud.dialogflowcx_v3.types.Agent`): + Required. The agent to create. + This corresponds to the ``agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Agent: + Agents are best described as Natural Language Understanding (NLU) modules + that transform user requests into actionable data. 
+ You can include agents in your app, product, or + service to determine user intent and respond to the + user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], + [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, agent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_agent.CreateAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if agent is not None: + request.agent = agent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_agent, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=180.0, + ), + default_timeout=180.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_agent( + self, + request: Optional[Union[gcdc_agent.UpdateAgentRequest, dict]] = None, + *, + agent: Optional[gcdc_agent.Agent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_agent.Agent: + r"""Updates the specified agent. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + agent = dialogflowcx_v3.Agent() + agent.display_name = "display_name_value" + agent.default_language_code = "default_language_code_value" + agent.time_zone = "time_zone_value" + + request = dialogflowcx_v3.UpdateAgentRequest( + agent=agent, + ) + + # Make the request + response = await client.update_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateAgentRequest, dict]]): + The request object. The request message for + [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent]. + agent (:class:`google.cloud.dialogflowcx_v3.types.Agent`): + Required. The agent to update. 
+ This corresponds to the ``agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Agent: + Agents are best described as Natural Language Understanding (NLU) modules + that transform user requests into actionable data. + You can include agents in your app, product, or + service to determine user intent and respond to the + user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], + [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([agent, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_agent.UpdateAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if agent is not None: + request.agent = agent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_agent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("agent.name", request.agent.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_agent( + self, + request: Optional[Union[agent.DeleteAgentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteAgentRequest( + name="name_value", + ) + + # Make the request + await client.delete_agent(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteAgentRequest, dict]]): + The request object. 
The request message for + [Agents.DeleteAgent][google.cloud.dialogflow.cx.v3.Agents.DeleteAgent]. + name (:class:`str`): + Required. The name of the agent to delete. Format: + ``projects//locations//agents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = agent.DeleteAgentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_agent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def export_agent( + self, + request: Optional[Union[agent.ExportAgentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports the specified agent to a binary file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportAgentResponse][google.cloud.dialogflow.cx.v3.ExportAgentResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_export_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportAgentRequest( + name="name_value", + ) + + # Make the request + operation = client.export_agent(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ExportAgentRequest, dict]]): + The request object. The request message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportAgentResponse` The response message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + + """ + # Create or coerce a protobuf request object. + request = agent.ExportAgentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_agent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + agent.ExportAgentResponse, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def restore_agent( + self, + request: Optional[Union[agent.RestoreAgentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restores the specified agent from a binary file. + + Replaces the current agent with a new one. Note that all + existing resources in agent (e.g. intents, entity types, flows) + will be removed. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_restore_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.RestoreAgentRequest( + agent_uri="agent_uri_value", + name="name_value", + ) + + # Make the request + operation = client.restore_agent(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.RestoreAgentRequest, dict]]): + The request object. The request message for + [Agents.RestoreAgent][google.cloud.dialogflow.cx.v3.Agents.RestoreAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + request = agent.RestoreAgentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_agent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def validate_agent( + self, + request: Optional[Union[agent.ValidateAgentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.AgentValidationResult: + r"""Validates the specified agent and creates or updates + validation results. The agent in draft version is + validated. Please call this API after the training is + completed to get the complete validation results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_validate_agent(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ValidateAgentRequest( + name="name_value", + ) + + # Make the request + response = await client.validate_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ValidateAgentRequest, dict]]): + The request object. The request message for + [Agents.ValidateAgent][google.cloud.dialogflow.cx.v3.Agents.ValidateAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.AgentValidationResult: + The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + + """ + # Create or coerce a protobuf request object. + request = agent.ValidateAgentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.validate_agent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_agent_validation_result( + self, + request: Optional[Union[agent.GetAgentValidationResultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.AgentValidationResult: + r"""Gets the latest agent validation result. Agent + validation is performed when ValidateAgent is called. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_agent_validation_result(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetAgentValidationResultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_agent_validation_result(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetAgentValidationResultRequest, dict]]): + The request object. The request message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + name (:class:`str`): + Required. The agent name. Format: + ``projects//locations//agents//validationResult``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.AgentValidationResult: + The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = agent.GetAgentValidationResultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_agent_validation_result, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_generative_settings( + self, + request: Optional[Union[agent.GetGenerativeSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + language_code: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_settings.GenerativeSettings: + r"""Gets the generative settings for the agent. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_generative_settings(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetGenerativeSettingsRequest( + name="name_value", + language_code="language_code_value", + ) + + # Make the request + response = await client.get_generative_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetGenerativeSettingsRequest, dict]]): + The request object. Request for + [GetGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.GetGenerativeSettings] + RPC. + name (:class:`str`): + Required. Format: + ``projects//locations//agents//generativeSettings``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + language_code (:class:`str`): + Required. Language code of the + generative settings. + + This corresponds to the ``language_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.GenerativeSettings: + Settings for Generative AI. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, language_code]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = agent.GetGenerativeSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if language_code is not None: + request.language_code = language_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_generative_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_generative_settings( + self, + request: Optional[Union[agent.UpdateGenerativeSettingsRequest, dict]] = None, + *, + generative_settings: Optional[ + gcdc_generative_settings.GenerativeSettings + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_generative_settings.GenerativeSettings: + r"""Updates the generative settings for the agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_generative_settings(): + # Create a client + client = dialogflowcx_v3.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.UpdateGenerativeSettingsRequest( + ) + + # Make the request + response = await client.update_generative_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateGenerativeSettingsRequest, dict]]): + The request object. Request for + [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.UpdateGenerativeSettings] + RPC. + generative_settings (:class:`google.cloud.dialogflowcx_v3.types.GenerativeSettings`): + Required. Generative settings to + update. + + This corresponds to the ``generative_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The mask to control which + fields get updated. If the mask is not + present, all fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.GenerativeSettings: + Settings for Generative AI. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([generative_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = agent.UpdateGenerativeSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if generative_settings is not None: + request.generative_settings = generative_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_generative_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("generative_settings.name", request.generative_settings.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AgentsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AgentsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py new file mode 100644 index 000000000000..21f116761315 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py @@ -0,0 +1,2121 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.agents import pagers +from google.cloud.dialogflowcx_v3.types import ( + generative_settings as gcdc_generative_settings, +) +from google.cloud.dialogflowcx_v3.types import advanced_settings +from google.cloud.dialogflowcx_v3.types import agent +from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent +from google.cloud.dialogflowcx_v3.types import audio_config, flow +from google.cloud.dialogflowcx_v3.types import generative_settings +from google.cloud.dialogflowcx_v3.types import safety_settings + +from .transports.base import 
DEFAULT_CLIENT_INFO, AgentsTransport +from .transports.grpc import AgentsGrpcTransport +from .transports.grpc_asyncio import AgentsGrpcAsyncIOTransport +from .transports.rest import AgentsRestTransport + + +class AgentsClientMeta(type): + """Metaclass for the Agents client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[AgentsTransport]] + _transport_registry["grpc"] = AgentsGrpcTransport + _transport_registry["grpc_asyncio"] = AgentsGrpcAsyncIOTransport + _transport_registry["rest"] = AgentsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AgentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AgentsClient(metaclass=AgentsClientMeta): + """Service for managing [Agents][google.cloud.dialogflow.cx.v3.Agent].""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AgentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AgentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AgentsTransport: + """Returns the transport used by the client instance. + + Returns: + AgentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def agent_path( + project: str, + location: str, + agent: str, + ) -> str: + """Returns a fully-qualified agent string.""" + return "projects/{project}/locations/{location}/agents/{agent}".format( + project=project, + location=location, + agent=agent, + ) + + @staticmethod + def parse_agent_path(path: str) -> Dict[str, str]: + """Parses a agent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def agent_generative_settings_path( + project: str, + location: str, + agent: str, + ) -> str: + """Returns a fully-qualified agent_generative_settings string.""" + return "projects/{project}/locations/{location}/agents/{agent}/generativeSettings".format( + project=project, + location=location, + agent=agent, + ) + + @staticmethod + def parse_agent_generative_settings_path(path: str) -> Dict[str, str]: + """Parses a agent_generative_settings path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/generativeSettings$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def agent_validation_result_path( + project: str, + location: str, + agent: str, + ) -> str: + """Returns a fully-qualified agent_validation_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/validationResult".format( + project=project, + location=location, + agent=agent, + ) + + @staticmethod + def parse_agent_validation_result_path(path: str) -> Dict[str, str]: + """Parses a agent_validation_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/validationResult$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def environment_path( + project: str, + location: str, + agent: str, + environment: str, + ) -> str: + """Returns a fully-qualified environment 
string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}".format( + project=project, + location=location, + agent=agent, + environment=environment, + ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str, str]: + """Parses a environment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def flow_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_path(path: str) -> Dict[str, str]: + """Parses a flow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def flow_validation_result_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow_validation_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/validationResult".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_validation_result_path(path: str) -> Dict[str, str]: + """Parses a flow_validation_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/validationResult$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def security_settings_path( + project: str, + location: str, + security_settings: str, + ) -> str: + """Returns a fully-qualified security_settings string.""" + return 
"projects/{project}/locations/{location}/securitySettings/{security_settings}".format( + project=project, + location=location, + security_settings=security_settings, + ) + + @staticmethod + def parse_security_settings_path(path: str) -> Dict[str, str]: + """Parses a security_settings path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/securitySettings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def 
parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AgentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the agents client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AgentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AgentsTransport): + # transport is a AgentsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_agents( + self, + request: Optional[Union[agent.ListAgentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAgentsPager: + r"""Returns the list of all agents in the specified + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_agents(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListAgentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_agents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListAgentsRequest, dict]): + The request object. The request message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + parent (str): + Required. The location to list all agents for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.agents.pagers.ListAgentsPager: + The response message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a agent.ListAgentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.ListAgentsRequest): + request = agent.ListAgentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_agents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAgentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_agent( + self, + request: Optional[Union[agent.GetAgentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.Agent: + r"""Retrieves the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetAgentRequest( + name="name_value", + ) + + # Make the request + response = client.get_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetAgentRequest, dict]): + The request object. The request message for + [Agents.GetAgent][google.cloud.dialogflow.cx.v3.Agents.GetAgent]. + name (str): + Required. The name of the agent. Format: + ``projects//locations//agents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Agent: + Agents are best described as Natural Language Understanding (NLU) modules + that transform user requests into actionable data. + You can include agents in your app, product, or + service to determine user intent and respond to the + user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], + [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a agent.GetAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.GetAgentRequest): + request = agent.GetAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_agent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_agent( + self, + request: Optional[Union[gcdc_agent.CreateAgentRequest, dict]] = None, + *, + parent: Optional[str] = None, + agent: Optional[gcdc_agent.Agent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_agent.Agent: + r"""Creates an agent in the specified location. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + agent = dialogflowcx_v3.Agent() + agent.display_name = "display_name_value" + agent.default_language_code = "default_language_code_value" + agent.time_zone = "time_zone_value" + + request = dialogflowcx_v3.CreateAgentRequest( + parent="parent_value", + agent=agent, + ) + + # Make the request + response = client.create_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateAgentRequest, dict]): + The request object. The request message for + [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent]. + parent (str): + Required. The location to create a agent for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + agent (google.cloud.dialogflowcx_v3.types.Agent): + Required. The agent to create. + This corresponds to the ``agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.Agent: + Agents are best described as Natural Language Understanding (NLU) modules + that transform user requests into actionable data. + You can include agents in your app, product, or + service to determine user intent and respond to the + user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], + [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, agent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_agent.CreateAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_agent.CreateAgentRequest): + request = gcdc_agent.CreateAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if agent is not None: + request.agent = agent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_agent] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_agent( + self, + request: Optional[Union[gcdc_agent.UpdateAgentRequest, dict]] = None, + *, + agent: Optional[gcdc_agent.Agent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_agent.Agent: + r"""Updates the specified agent. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + agent = dialogflowcx_v3.Agent() + agent.display_name = "display_name_value" + agent.default_language_code = "default_language_code_value" + agent.time_zone = "time_zone_value" + + request = dialogflowcx_v3.UpdateAgentRequest( + agent=agent, + ) + + # Make the request + response = client.update_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateAgentRequest, dict]): + The request object. The request message for + [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent]. 
+ agent (google.cloud.dialogflowcx_v3.types.Agent): + Required. The agent to update. + This corresponds to the ``agent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Agent: + Agents are best described as Natural Language Understanding (NLU) modules + that transform user requests into actionable data. + You can include agents in your app, product, or + service to determine user intent and respond to the + user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], + [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([agent, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_agent.UpdateAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_agent.UpdateAgentRequest): + request = gcdc_agent.UpdateAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if agent is not None: + request.agent = agent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_agent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("agent.name", request.agent.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_agent( + self, + request: Optional[Union[agent.DeleteAgentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteAgentRequest( + name="name_value", + ) + + # Make the request + client.delete_agent(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteAgentRequest, dict]): + The request object. The request message for + [Agents.DeleteAgent][google.cloud.dialogflow.cx.v3.Agents.DeleteAgent]. + name (str): + Required. The name of the agent to delete. Format: + ``projects//locations//agents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a agent.DeleteAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.DeleteAgentRequest): + request = agent.DeleteAgentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_agent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def export_agent( + self, + request: Optional[Union[agent.ExportAgentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports the specified agent to a binary file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportAgentResponse][google.cloud.dialogflow.cx.v3.ExportAgentResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_export_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportAgentRequest( + name="name_value", + ) + + # Make the request + operation = client.export_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ExportAgentRequest, dict]): + The request object. The request message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportAgentResponse` The response message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a agent.ExportAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.ExportAgentRequest): + request = agent.ExportAgentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.export_agent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + agent.ExportAgentResponse, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + def restore_agent( + self, + request: Optional[Union[agent.RestoreAgentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restores the specified agent from a binary file. + + Replaces the current agent with a new one. Note that all + existing resources in agent (e.g. intents, entity types, flows) + will be removed. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_restore_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.RestoreAgentRequest( + agent_uri="agent_uri_value", + name="name_value", + ) + + # Make the request + operation = client.restore_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.RestoreAgentRequest, dict]): + The request object. The request message for + [Agents.RestoreAgent][google.cloud.dialogflow.cx.v3.Agents.RestoreAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a agent.RestoreAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, agent.RestoreAgentRequest): + request = agent.RestoreAgentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_agent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + def validate_agent( + self, + request: Optional[Union[agent.ValidateAgentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.AgentValidationResult: + r"""Validates the specified agent and creates or updates + validation results. The agent in draft version is + validated. Please call this API after the training is + completed to get the complete validation results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_validate_agent(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ValidateAgentRequest( + name="name_value", + ) + + # Make the request + response = client.validate_agent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ValidateAgentRequest, dict]): + The request object. The request message for + [Agents.ValidateAgent][google.cloud.dialogflow.cx.v3.Agents.ValidateAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.AgentValidationResult: + The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a agent.ValidateAgentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.ValidateAgentRequest): + request = agent.ValidateAgentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_agent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_agent_validation_result( + self, + request: Optional[Union[agent.GetAgentValidationResultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.AgentValidationResult: + r"""Gets the latest agent validation result. Agent + validation is performed when ValidateAgent is called. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_agent_validation_result(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetAgentValidationResultRequest( + name="name_value", + ) + + # Make the request + response = client.get_agent_validation_result(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetAgentValidationResultRequest, dict]): + The request object. The request message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + name (str): + Required. The agent name. Format: + ``projects//locations//agents//validationResult``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.AgentValidationResult: + The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a agent.GetAgentValidationResultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.GetAgentValidationResultRequest): + request = agent.GetAgentValidationResultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_agent_validation_result + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_generative_settings( + self, + request: Optional[Union[agent.GetGenerativeSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + language_code: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_settings.GenerativeSettings: + r"""Gets the generative settings for the agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_generative_settings(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetGenerativeSettingsRequest( + name="name_value", + language_code="language_code_value", + ) + + # Make the request + response = client.get_generative_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetGenerativeSettingsRequest, dict]): + The request object. Request for + [GetGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.GetGenerativeSettings] + RPC. + name (str): + Required. Format: + ``projects//locations//agents//generativeSettings``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + language_code (str): + Required. Language code of the + generative settings. + + This corresponds to the ``language_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.GenerativeSettings: + Settings for Generative AI. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, language_code]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a agent.GetGenerativeSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.GetGenerativeSettingsRequest): + request = agent.GetGenerativeSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if language_code is not None: + request.language_code = language_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_generative_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_generative_settings( + self, + request: Optional[Union[agent.UpdateGenerativeSettingsRequest, dict]] = None, + *, + generative_settings: Optional[ + gcdc_generative_settings.GenerativeSettings + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_generative_settings.GenerativeSettings: + r"""Updates the generative settings for the agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_generative_settings(): + # Create a client + client = dialogflowcx_v3.AgentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.UpdateGenerativeSettingsRequest( + ) + + # Make the request + response = client.update_generative_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateGenerativeSettingsRequest, dict]): + The request object. Request for + [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.UpdateGenerativeSettings] + RPC. + generative_settings (google.cloud.dialogflowcx_v3.types.GenerativeSettings): + Required. Generative settings to + update. + + This corresponds to the ``generative_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The mask to control which + fields get updated. 
If the mask is not + present, all fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.GenerativeSettings: + Settings for Generative AI. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([generative_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a agent.UpdateGenerativeSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, agent.UpdateGenerativeSettingsRequest): + request = agent.UpdateGenerativeSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if generative_settings is not None: + request.generative_settings = generative_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_generative_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("generative_settings.name", request.generative_settings.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "AgentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def cancel_operation(
        self,
        request: Optional[operations_pb2.CancelOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Starts asynchronous cancellation on a long-running operation.

        The server makes a best effort to cancel the operation, but success
        is not guaranteed.  If the server doesn't support this method, it returns
        `google.rpc.Code.UNIMPLEMENTED`.

        Args:
            request (:class:`~.operations_pb2.CancelOperationRequest`):
                The request object. Request message for
                `CancelOperation` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                 if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            None
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.CancelOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method.wrap_method(
            self._transport.cancel_operation,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.  CancelOperation has no response payload, so the
        # RPC result is deliberately discarded.
        rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def get_location(
        self,
        request: Optional[locations_pb2.GetLocationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> locations_pb2.Location:
        r"""Gets information about a location.

        Args:
            request (:class:`~.location_pb2.GetLocationRequest`):
                The request object. Request message for
                `GetLocation` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                 if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.location_pb2.Location:
                Location object.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = locations_pb2.GetLocationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method.wrap_method(
            self._transport.get_location,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def list_locations(
        self,
        request: Optional[locations_pb2.ListLocationsRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> locations_pb2.ListLocationsResponse:
        r"""Lists information about the supported locations for this service.

        Args:
            request (:class:`~.location_pb2.ListLocationsRequest`):
                The request object. Request message for
                `ListLocations` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                 if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.location_pb2.ListLocationsResponse:
                Response message for ``ListLocations`` method.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = locations_pb2.ListLocationsRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method.wrap_method(
            self._transport.list_locations,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AgentsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/pagers.py new file mode 100644 index 000000000000..a3b83d0ee8df --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import agent + + +class ListAgentsPager: + """A pager for iterating through ``list_agents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListAgentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``agents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAgents`` requests and continue to iterate + through the ``agents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListAgentsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., agent.ListAgentsResponse], + request: agent.ListAgentsRequest, + response: agent.ListAgentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListAgentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListAgentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = agent.ListAgentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[agent.ListAgentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[agent.Agent]: + for page in self.pages: + yield from page.agents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAgentsAsyncPager: + """A pager for iterating through ``list_agents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListAgentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``agents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAgents`` requests and continue to iterate + through the ``agents`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListAgentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[agent.ListAgentsResponse]], + request: agent.ListAgentsRequest, + response: agent.ListAgentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListAgentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListAgentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = agent.ListAgentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[agent.ListAgentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[agent.Agent]: + async def async_generator(): + async for page in self.pages: + for response in page.agents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/__init__.py new file mode 100644 index 000000000000..622043da5e35 --- 
/dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AgentsTransport +from .grpc import AgentsGrpcTransport +from .grpc_asyncio import AgentsGrpcAsyncIOTransport +from .rest import AgentsRestInterceptor, AgentsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[AgentsTransport]] +_transport_registry["grpc"] = AgentsGrpcTransport +_transport_registry["grpc_asyncio"] = AgentsGrpcAsyncIOTransport +_transport_registry["rest"] = AgentsRestTransport + +__all__ = ( + "AgentsTransport", + "AgentsGrpcTransport", + "AgentsGrpcAsyncIOTransport", + "AgentsRestTransport", + "AgentsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/base.py new file mode 100644 index 000000000000..5ff14471cf92 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/base.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

from google.cloud.dialogflowcx_v3 import gapic_version as package_version
from google.cloud.dialogflowcx_v3.types import (
    generative_settings as gcdc_generative_settings,
)
from google.cloud.dialogflowcx_v3.types import agent
from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent
from google.cloud.dialogflowcx_v3.types import generative_settings

# Default client metadata (user-agent versioning) attached to every request
# unless a caller supplies its own ``client_info``.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class AgentsTransport(abc.ABC):
    """Abstract transport class for Agents."""

    # OAuth scopes used as defaults when credentials are resolved from the
    # environment without an explicit ``scopes`` argument.
    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/dialogflow",
    )

    DEFAULT_HOST: str = "dialogflow.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to GDC-H
                credentials; falls back to ``host`` when unset.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_agents: gapic_v1.method.wrap_method( + self.list_agents, + default_timeout=None, + client_info=client_info, + ), + self.get_agent: gapic_v1.method.wrap_method( + self.get_agent, + default_timeout=None, + client_info=client_info, + ), + self.create_agent: gapic_v1.method.wrap_method( + self.create_agent, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=180.0, + ), + default_timeout=180.0, + client_info=client_info, + ), + self.update_agent: gapic_v1.method.wrap_method( + self.update_agent, + default_timeout=None, + client_info=client_info, + ), + self.delete_agent: gapic_v1.method.wrap_method( + self.delete_agent, + default_timeout=None, + client_info=client_info, + ), + self.export_agent: gapic_v1.method.wrap_method( + self.export_agent, + default_timeout=None, + client_info=client_info, + ), + self.restore_agent: gapic_v1.method.wrap_method( + self.restore_agent, + default_timeout=None, + client_info=client_info, + ), + self.validate_agent: gapic_v1.method.wrap_method( + self.validate_agent, + default_timeout=None, + client_info=client_info, + ), + self.get_agent_validation_result: gapic_v1.method.wrap_method( + self.get_agent_validation_result, + default_timeout=None, + client_info=client_info, + ), + self.get_generative_settings: gapic_v1.method.wrap_method( + self.get_generative_settings, + 
default_timeout=None, + client_info=client_info, + ), + self.update_generative_settings: gapic_v1.method.wrap_method( + self.update_generative_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_agents( + self, + ) -> Callable[ + [agent.ListAgentsRequest], + Union[agent.ListAgentsResponse, Awaitable[agent.ListAgentsResponse]], + ]: + raise NotImplementedError() + + @property + def get_agent( + self, + ) -> Callable[[agent.GetAgentRequest], Union[agent.Agent, Awaitable[agent.Agent]]]: + raise NotImplementedError() + + @property + def create_agent( + self, + ) -> Callable[ + [gcdc_agent.CreateAgentRequest], + Union[gcdc_agent.Agent, Awaitable[gcdc_agent.Agent]], + ]: + raise NotImplementedError() + + @property + def update_agent( + self, + ) -> Callable[ + [gcdc_agent.UpdateAgentRequest], + Union[gcdc_agent.Agent, Awaitable[gcdc_agent.Agent]], + ]: + raise NotImplementedError() + + @property + def delete_agent( + self, + ) -> Callable[ + [agent.DeleteAgentRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def export_agent( + self, + ) -> Callable[ + [agent.ExportAgentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_agent( + self, + ) -> Callable[ + [agent.RestoreAgentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def validate_agent( + self, + ) -> Callable[ + [agent.ValidateAgentRequest], + 
Union[agent.AgentValidationResult, Awaitable[agent.AgentValidationResult]], + ]: + raise NotImplementedError() + + @property + def get_agent_validation_result( + self, + ) -> Callable[ + [agent.GetAgentValidationResultRequest], + Union[agent.AgentValidationResult, Awaitable[agent.AgentValidationResult]], + ]: + raise NotImplementedError() + + @property + def get_generative_settings( + self, + ) -> Callable[ + [agent.GetGenerativeSettingsRequest], + Union[ + generative_settings.GenerativeSettings, + Awaitable[generative_settings.GenerativeSettings], + ], + ]: + raise NotImplementedError() + + @property + def update_generative_settings( + self, + ) -> Callable[ + [agent.UpdateGenerativeSettingsRequest], + Union[ + gcdc_generative_settings.GenerativeSettings, + Awaitable[gcdc_generative_settings.GenerativeSettings], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = 
("AgentsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/grpc.py new file mode 100644 index 000000000000..d2d981fb64ce --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/grpc.py @@ -0,0 +1,679 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + generative_settings as gcdc_generative_settings, +) +from google.cloud.dialogflowcx_v3.types import agent +from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent +from google.cloud.dialogflowcx_v3.types import generative_settings + +from .base import DEFAULT_CLIENT_INFO, AgentsTransport + + +class AgentsGrpcTransport(AgentsTransport): + """gRPC backend transport for Agents. + + Service for managing [Agents][google.cloud.dialogflow.cx.v3.Agent]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_agents( + self, + ) -> Callable[[agent.ListAgentsRequest], agent.ListAgentsResponse]: + r"""Return a callable for the list agents method over gRPC. + + Returns the list of all agents in the specified + location. + + Returns: + Callable[[~.ListAgentsRequest], + ~.ListAgentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_agents" not in self._stubs: + self._stubs["list_agents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/ListAgents", + request_serializer=agent.ListAgentsRequest.serialize, + response_deserializer=agent.ListAgentsResponse.deserialize, + ) + return self._stubs["list_agents"] + + @property + def get_agent(self) -> Callable[[agent.GetAgentRequest], agent.Agent]: + r"""Return a callable for the get agent method over gRPC. + + Retrieves the specified agent. 
+ + Returns: + Callable[[~.GetAgentRequest], + ~.Agent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_agent" not in self._stubs: + self._stubs["get_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/GetAgent", + request_serializer=agent.GetAgentRequest.serialize, + response_deserializer=agent.Agent.deserialize, + ) + return self._stubs["get_agent"] + + @property + def create_agent( + self, + ) -> Callable[[gcdc_agent.CreateAgentRequest], gcdc_agent.Agent]: + r"""Return a callable for the create agent method over gRPC. + + Creates an agent in the specified location. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateAgentRequest], + ~.Agent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_agent" not in self._stubs: + self._stubs["create_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/CreateAgent", + request_serializer=gcdc_agent.CreateAgentRequest.serialize, + response_deserializer=gcdc_agent.Agent.deserialize, + ) + return self._stubs["create_agent"] + + @property + def update_agent( + self, + ) -> Callable[[gcdc_agent.UpdateAgentRequest], gcdc_agent.Agent]: + r"""Return a callable for the update agent method over gRPC. + + Updates the specified agent. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. 
+ + Returns: + Callable[[~.UpdateAgentRequest], + ~.Agent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_agent" not in self._stubs: + self._stubs["update_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/UpdateAgent", + request_serializer=gcdc_agent.UpdateAgentRequest.serialize, + response_deserializer=gcdc_agent.Agent.deserialize, + ) + return self._stubs["update_agent"] + + @property + def delete_agent(self) -> Callable[[agent.DeleteAgentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete agent method over gRPC. + + Deletes the specified agent. + + Returns: + Callable[[~.DeleteAgentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_agent" not in self._stubs: + self._stubs["delete_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/DeleteAgent", + request_serializer=agent.DeleteAgentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_agent"] + + @property + def export_agent( + self, + ) -> Callable[[agent.ExportAgentRequest], operations_pb2.Operation]: + r"""Return a callable for the export agent method over gRPC. + + Exports the specified agent to a binary file. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportAgentResponse][google.cloud.dialogflow.cx.v3.ExportAgentResponse] + + Returns: + Callable[[~.ExportAgentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_agent" not in self._stubs: + self._stubs["export_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/ExportAgent", + request_serializer=agent.ExportAgentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_agent"] + + @property + def restore_agent( + self, + ) -> Callable[[agent.RestoreAgentRequest], operations_pb2.Operation]: + r"""Return a callable for the restore agent method over gRPC. + + Restores the specified agent from a binary file. + + Replaces the current agent with a new one. Note that all + existing resources in agent (e.g. intents, entity types, flows) + will be removed. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + Returns: + Callable[[~.RestoreAgentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restore_agent" not in self._stubs: + self._stubs["restore_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/RestoreAgent", + request_serializer=agent.RestoreAgentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_agent"] + + @property + def validate_agent( + self, + ) -> Callable[[agent.ValidateAgentRequest], agent.AgentValidationResult]: + r"""Return a callable for the validate agent method over gRPC. + + Validates the specified agent and creates or updates + validation results. The agent in draft version is + validated. Please call this API after the training is + completed to get the complete validation results. + + Returns: + Callable[[~.ValidateAgentRequest], + ~.AgentValidationResult]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_agent" not in self._stubs: + self._stubs["validate_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/ValidateAgent", + request_serializer=agent.ValidateAgentRequest.serialize, + response_deserializer=agent.AgentValidationResult.deserialize, + ) + return self._stubs["validate_agent"] + + @property + def get_agent_validation_result( + self, + ) -> Callable[[agent.GetAgentValidationResultRequest], agent.AgentValidationResult]: + r"""Return a callable for the get agent validation result method over gRPC. + + Gets the latest agent validation result. Agent + validation is performed when ValidateAgent is called. + + Returns: + Callable[[~.GetAgentValidationResultRequest], + ~.AgentValidationResult]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_agent_validation_result" not in self._stubs: + self._stubs["get_agent_validation_result"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/GetAgentValidationResult", + request_serializer=agent.GetAgentValidationResultRequest.serialize, + response_deserializer=agent.AgentValidationResult.deserialize, + ) + return self._stubs["get_agent_validation_result"] + + @property + def get_generative_settings( + self, + ) -> Callable[ + [agent.GetGenerativeSettingsRequest], generative_settings.GenerativeSettings + ]: + r"""Return a callable for the get generative settings method over gRPC. + + Gets the generative settings for the agent. + + Returns: + Callable[[~.GetGenerativeSettingsRequest], + ~.GenerativeSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_generative_settings" not in self._stubs: + self._stubs["get_generative_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/GetGenerativeSettings", + request_serializer=agent.GetGenerativeSettingsRequest.serialize, + response_deserializer=generative_settings.GenerativeSettings.deserialize, + ) + return self._stubs["get_generative_settings"] + + @property + def update_generative_settings( + self, + ) -> Callable[ + [agent.UpdateGenerativeSettingsRequest], + gcdc_generative_settings.GenerativeSettings, + ]: + r"""Return a callable for the update generative settings method over gRPC. + + Updates the generative settings for the agent. + + Returns: + Callable[[~.UpdateGenerativeSettingsRequest], + ~.GenerativeSettings]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_generative_settings" not in self._stubs: + self._stubs["update_generative_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/UpdateGenerativeSettings", + request_serializer=agent.UpdateGenerativeSettingsRequest.serialize, + response_deserializer=gcdc_generative_settings.GenerativeSettings.deserialize, + ) + return self._stubs["update_generative_settings"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AgentsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/grpc_asyncio.py new file mode 100644 index 000000000000..88a25d9f9da1 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/grpc_asyncio.py @@ -0,0 +1,685 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + generative_settings as gcdc_generative_settings, +) +from google.cloud.dialogflowcx_v3.types import agent +from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent +from google.cloud.dialogflowcx_v3.types import generative_settings + +from .base import DEFAULT_CLIENT_INFO, AgentsTransport +from .grpc import AgentsGrpcTransport + + +class AgentsGrpcAsyncIOTransport(AgentsTransport): + """gRPC AsyncIO backend transport for Agents. + + Service for managing [Agents][google.cloud.dialogflow.cx.v3.Agent]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_agents( + self, + ) -> Callable[[agent.ListAgentsRequest], Awaitable[agent.ListAgentsResponse]]: + r"""Return a callable for the list agents method over gRPC. + + Returns the list of all agents in the specified + location. + + Returns: + Callable[[~.ListAgentsRequest], + Awaitable[~.ListAgentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_agents" not in self._stubs: + self._stubs["list_agents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/ListAgents", + request_serializer=agent.ListAgentsRequest.serialize, + response_deserializer=agent.ListAgentsResponse.deserialize, + ) + return self._stubs["list_agents"] + + @property + def get_agent(self) -> Callable[[agent.GetAgentRequest], Awaitable[agent.Agent]]: + r"""Return a callable for the get agent method over gRPC. + + Retrieves the specified agent. + + Returns: + Callable[[~.GetAgentRequest], + Awaitable[~.Agent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_agent" not in self._stubs: + self._stubs["get_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/GetAgent", + request_serializer=agent.GetAgentRequest.serialize, + response_deserializer=agent.Agent.deserialize, + ) + return self._stubs["get_agent"] + + @property + def create_agent( + self, + ) -> Callable[[gcdc_agent.CreateAgentRequest], Awaitable[gcdc_agent.Agent]]: + r"""Return a callable for the create agent method over gRPC. + + Creates an agent in the specified location. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateAgentRequest], + Awaitable[~.Agent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_agent" not in self._stubs: + self._stubs["create_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/CreateAgent", + request_serializer=gcdc_agent.CreateAgentRequest.serialize, + response_deserializer=gcdc_agent.Agent.deserialize, + ) + return self._stubs["create_agent"] + + @property + def update_agent( + self, + ) -> Callable[[gcdc_agent.UpdateAgentRequest], Awaitable[gcdc_agent.Agent]]: + r"""Return a callable for the update agent method over gRPC. + + Updates the specified agent. + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateAgentRequest], + Awaitable[~.Agent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_agent" not in self._stubs: + self._stubs["update_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/UpdateAgent", + request_serializer=gcdc_agent.UpdateAgentRequest.serialize, + response_deserializer=gcdc_agent.Agent.deserialize, + ) + return self._stubs["update_agent"] + + @property + def delete_agent( + self, + ) -> Callable[[agent.DeleteAgentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete agent method over gRPC. + + Deletes the specified agent. + + Returns: + Callable[[~.DeleteAgentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_agent" not in self._stubs: + self._stubs["delete_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/DeleteAgent", + request_serializer=agent.DeleteAgentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_agent"] + + @property + def export_agent( + self, + ) -> Callable[[agent.ExportAgentRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export agent method over gRPC. + + Exports the specified agent to a binary file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportAgentResponse][google.cloud.dialogflow.cx.v3.ExportAgentResponse] + + Returns: + Callable[[~.ExportAgentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_agent" not in self._stubs: + self._stubs["export_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/ExportAgent", + request_serializer=agent.ExportAgentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_agent"] + + @property + def restore_agent( + self, + ) -> Callable[[agent.RestoreAgentRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore agent method over gRPC. + + Restores the specified agent from a binary file. + + Replaces the current agent with a new one. Note that all + existing resources in agent (e.g. intents, entity types, flows) + will be removed. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train flows prior to sending them + queries. See the `training + documentation `__. + + Returns: + Callable[[~.RestoreAgentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_agent" not in self._stubs: + self._stubs["restore_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/RestoreAgent", + request_serializer=agent.RestoreAgentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_agent"] + + @property + def validate_agent( + self, + ) -> Callable[[agent.ValidateAgentRequest], Awaitable[agent.AgentValidationResult]]: + r"""Return a callable for the validate agent method over gRPC. + + Validates the specified agent and creates or updates + validation results. The agent in draft version is + validated. Please call this API after the training is + completed to get the complete validation results. + + Returns: + Callable[[~.ValidateAgentRequest], + Awaitable[~.AgentValidationResult]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "validate_agent" not in self._stubs: + self._stubs["validate_agent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/ValidateAgent", + request_serializer=agent.ValidateAgentRequest.serialize, + response_deserializer=agent.AgentValidationResult.deserialize, + ) + return self._stubs["validate_agent"] + + @property + def get_agent_validation_result( + self, + ) -> Callable[ + [agent.GetAgentValidationResultRequest], Awaitable[agent.AgentValidationResult] + ]: + r"""Return a callable for the get agent validation result method over gRPC. + + Gets the latest agent validation result. Agent + validation is performed when ValidateAgent is called. + + Returns: + Callable[[~.GetAgentValidationResultRequest], + Awaitable[~.AgentValidationResult]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_agent_validation_result" not in self._stubs: + self._stubs["get_agent_validation_result"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/GetAgentValidationResult", + request_serializer=agent.GetAgentValidationResultRequest.serialize, + response_deserializer=agent.AgentValidationResult.deserialize, + ) + return self._stubs["get_agent_validation_result"] + + @property + def get_generative_settings( + self, + ) -> Callable[ + [agent.GetGenerativeSettingsRequest], + Awaitable[generative_settings.GenerativeSettings], + ]: + r"""Return a callable for the get generative settings method over gRPC. + + Gets the generative settings for the agent. + + Returns: + Callable[[~.GetGenerativeSettingsRequest], + Awaitable[~.GenerativeSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_generative_settings" not in self._stubs: + self._stubs["get_generative_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/GetGenerativeSettings", + request_serializer=agent.GetGenerativeSettingsRequest.serialize, + response_deserializer=generative_settings.GenerativeSettings.deserialize, + ) + return self._stubs["get_generative_settings"] + + @property + def update_generative_settings( + self, + ) -> Callable[ + [agent.UpdateGenerativeSettingsRequest], + Awaitable[gcdc_generative_settings.GenerativeSettings], + ]: + r"""Return a callable for the update generative settings method over gRPC. + + Updates the generative settings for the agent. + + Returns: + Callable[[~.UpdateGenerativeSettingsRequest], + Awaitable[~.GenerativeSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_generative_settings" not in self._stubs: + self._stubs["update_generative_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Agents/UpdateGenerativeSettings", + request_serializer=agent.UpdateGenerativeSettingsRequest.serialize, + response_deserializer=gcdc_generative_settings.GenerativeSettings.deserialize, + ) + return self._stubs["update_generative_settings"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("AgentsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py new file mode 100644 index 000000000000..e5b6e943fea9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py @@ -0,0 +1,2165 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + generative_settings as gcdc_generative_settings, +) +from google.cloud.dialogflowcx_v3.types import agent +from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent +from google.cloud.dialogflowcx_v3.types import generative_settings + +from .base import AgentsTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AgentsRestInterceptor: + """Interceptor for Agents. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AgentsRestTransport. + + .. code-block:: python + class MyCustomAgentsInterceptor(AgentsRestInterceptor): + def pre_create_agent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_agent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_agent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_export_agent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_agent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_agent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_agent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_agent_validation_result(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_agent_validation_result(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_generative_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_generative_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_agents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_agents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_agent(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_agent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_agent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_agent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_generative_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_generative_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_validate_agent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_validate_agent(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AgentsRestTransport(interceptor=MyCustomAgentsInterceptor()) + client = AgentsClient(transport=transport) + + + """ + + def pre_create_agent( + self, + request: gcdc_agent.CreateAgentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_agent.CreateAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_create_agent(self, response: gcdc_agent.Agent) -> gcdc_agent.Agent: + """Post-rpc interceptor for create_agent + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_agent( + self, request: agent.DeleteAgentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[agent.DeleteAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def pre_export_agent( + self, request: agent.ExportAgentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[agent.ExportAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_export_agent( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_agent + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_get_agent( + self, request: agent.GetAgentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[agent.GetAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_get_agent(self, response: agent.Agent) -> agent.Agent: + """Post-rpc interceptor for get_agent + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. 
+ """ + return response + + def pre_get_agent_validation_result( + self, + request: agent.GetAgentValidationResultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[agent.GetAgentValidationResultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_agent_validation_result + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_get_agent_validation_result( + self, response: agent.AgentValidationResult + ) -> agent.AgentValidationResult: + """Post-rpc interceptor for get_agent_validation_result + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_get_generative_settings( + self, + request: agent.GetGenerativeSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[agent.GetGenerativeSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_generative_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_get_generative_settings( + self, response: generative_settings.GenerativeSettings + ) -> generative_settings.GenerativeSettings: + """Post-rpc interceptor for get_generative_settings + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_list_agents( + self, request: agent.ListAgentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[agent.ListAgentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_agents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. 
+ """ + return request, metadata + + def post_list_agents( + self, response: agent.ListAgentsResponse + ) -> agent.ListAgentsResponse: + """Post-rpc interceptor for list_agents + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_restore_agent( + self, request: agent.RestoreAgentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[agent.RestoreAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_restore_agent( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_agent + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_update_agent( + self, + request: gcdc_agent.UpdateAgentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_agent.UpdateAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_update_agent(self, response: gcdc_agent.Agent) -> gcdc_agent.Agent: + """Post-rpc interceptor for update_agent + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. 
+ """ + return response + + def pre_update_generative_settings( + self, + request: agent.UpdateGenerativeSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[agent.UpdateGenerativeSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_generative_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_update_generative_settings( + self, response: gcdc_generative_settings.GenerativeSettings + ) -> gcdc_generative_settings.GenerativeSettings: + """Post-rpc interceptor for update_generative_settings + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_validate_agent( + self, request: agent.ValidateAgentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[agent.ValidateAgentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for validate_agent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_validate_agent( + self, response: agent.AgentValidationResult + ) -> agent.AgentValidationResult: + """Post-rpc interceptor for validate_agent + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Agents server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Agents server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AgentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AgentsRestInterceptor + + +class AgentsRestTransport(AgentsTransport): + """REST backend transport for Agents. + + Service for managing [Agents][google.cloud.dialogflow.cx.v3.Agent]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AgentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AgentsRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateAgent(AgentsRestStub): + def __hash__(self): + return hash("CreateAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_agent.CreateAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_agent.Agent: + r"""Call the create agent method over HTTP. + + Args: + request (~.gcdc_agent.CreateAgentRequest): + The request object. The request message for + [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_agent.Agent: + Agents are best described as Natural Language + Understanding (NLU) modules that transform user requests + into actionable data. You can include agents in your + app, product, or service to determine user intent and + respond to the user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}/agents", + "body": "agent", + }, + ] + request, metadata = self._interceptor.pre_create_agent(request, metadata) + pb_request = gcdc_agent.CreateAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] 
= "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_agent.Agent() + pb_resp = gcdc_agent.Agent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_agent(resp) + return resp + + class _DeleteAgent(AgentsRestStub): + def __hash__(self): + return hash("DeleteAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.DeleteAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete agent method over HTTP. + + Args: + request (~.agent.DeleteAgentRequest): + The request object. The request message for + [Agents.DeleteAgent][google.cloud.dialogflow.cx.v3.Agents.DeleteAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_agent(request, metadata) + pb_request = agent.DeleteAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExportAgent(AgentsRestStub): + def __hash__(self): + return hash("ExportAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.ExportAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export agent method over HTTP. + + Args: + request (~.agent.ExportAgentRequest): + The request object. 
The request message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_agent(request, metadata) + pb_request = agent.ExportAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_agent(resp) + return resp + + class _GetAgent(AgentsRestStub): + def __hash__(self): + return hash("GetAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.GetAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.Agent: + r"""Call the get agent method over HTTP. + + Args: + request (~.agent.GetAgentRequest): + The request object. The request message for + [Agents.GetAgent][google.cloud.dialogflow.cx.v3.Agents.GetAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.agent.Agent: + Agents are best described as Natural Language + Understanding (NLU) modules that transform user requests + into actionable data. You can include agents in your + app, product, or service to determine user intent and + respond to the user in a natural way. 
+ + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*}", + }, + ] + request, metadata = self._interceptor.pre_get_agent(request, metadata) + pb_request = agent.GetAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = agent.Agent() + pb_resp = agent.Agent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_agent(resp) + return resp + + class _GetAgentValidationResult(AgentsRestStub): + def __hash__(self): + return hash("GetAgentValidationResult") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.GetAgentValidationResultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.AgentValidationResult: + r"""Call the get agent validation + result method over HTTP. + + Args: + request (~.agent.GetAgentValidationResultRequest): + The request object. The request message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.agent.AgentValidationResult: + The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/validationResult}", + }, + ] + request, metadata = self._interceptor.pre_get_agent_validation_result( + request, metadata + ) + pb_request = agent.GetAgentValidationResultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = agent.AgentValidationResult() + pb_resp = agent.AgentValidationResult.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_agent_validation_result(resp) + return resp + + class _GetGenerativeSettings(AgentsRestStub): + def __hash__(self): + return hash("GetGenerativeSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "languageCode": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.GetGenerativeSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> generative_settings.GenerativeSettings: + r"""Call the get generative settings method over HTTP. + + Args: + request (~.agent.GetGenerativeSettingsRequest): + The request object. Request for + [GetGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.GetGenerativeSettings] + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.generative_settings.GenerativeSettings: + Settings for Generative AI. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/generativeSettings}", + }, + ] + request, metadata = self._interceptor.pre_get_generative_settings( + request, metadata + ) + pb_request = agent.GetGenerativeSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_settings.GenerativeSettings() + pb_resp = generative_settings.GenerativeSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_generative_settings(resp) + return resp + + class _ListAgents(AgentsRestStub): + def __hash__(self): + return hash("ListAgents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.ListAgentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.ListAgentsResponse: + r"""Call the list agents method over HTTP. + + Args: + request (~.agent.ListAgentsRequest): + The request object. The request message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.agent.ListAgentsResponse: + The response message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/agents", + }, + ] + request, metadata = self._interceptor.pre_list_agents(request, metadata) + pb_request = agent.ListAgentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = agent.ListAgentsResponse() + pb_resp = agent.ListAgentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_agents(resp) + return resp + + class _RestoreAgent(AgentsRestStub): + def __hash__(self): + return hash("RestoreAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.RestoreAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore agent method over HTTP. + + Args: + request (~.agent.RestoreAgentRequest): + The request object. The request message for + [Agents.RestoreAgent][google.cloud.dialogflow.cx.v3.Agents.RestoreAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_agent(request, metadata) + pb_request = agent.RestoreAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_agent(resp) + return resp + + class _UpdateAgent(AgentsRestStub): + def __hash__(self): + return hash("UpdateAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_agent.UpdateAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_agent.Agent: + r"""Call the update agent method over HTTP. + + Args: + request (~.gcdc_agent.UpdateAgentRequest): + The request object. The request message for + [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_agent.Agent: + Agents are best described as Natural Language + Understanding (NLU) modules that transform user requests + into actionable data. You can include agents in your + app, product, or service to determine user intent and + respond to the user in a natural way. 
+ + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{agent.name=projects/*/locations/*/agents/*}", + "body": "agent", + }, + ] + request, metadata = self._interceptor.pre_update_agent(request, metadata) + pb_request = gcdc_agent.UpdateAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_agent.Agent() + pb_resp = gcdc_agent.Agent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_agent(resp) + return resp + + class _UpdateGenerativeSettings(AgentsRestStub): + def __hash__(self): + return hash("UpdateGenerativeSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.UpdateGenerativeSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_generative_settings.GenerativeSettings: + r"""Call the update generative + settings method over HTTP. + + Args: + request (~.agent.UpdateGenerativeSettingsRequest): + The request object. Request for + [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.UpdateGenerativeSettings] + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_generative_settings.GenerativeSettings: + Settings for Generative AI. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{generative_settings.name=projects/*/locations/*/agents/*/generativeSettings}", + "body": "generative_settings", + }, + ] + request, metadata = self._interceptor.pre_update_generative_settings( + request, metadata + ) + pb_request = agent.UpdateGenerativeSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_generative_settings.GenerativeSettings() + pb_resp = gcdc_generative_settings.GenerativeSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_generative_settings(resp) + return resp + + class _ValidateAgent(AgentsRestStub): + def __hash__(self): + return hash("ValidateAgent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: agent.ValidateAgentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> agent.AgentValidationResult: + r"""Call the validate agent method over HTTP. + + Args: + request (~.agent.ValidateAgentRequest): + The request object. The request message for + [Agents.ValidateAgent][google.cloud.dialogflow.cx.v3.Agents.ValidateAgent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.agent.AgentValidationResult: + The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*}:validate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_validate_agent(request, metadata) + pb_request = agent.ValidateAgentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = agent.AgentValidationResult() + pb_resp = agent.AgentValidationResult.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_agent(resp) + return resp + + @property + def create_agent( + self, + ) -> Callable[[gcdc_agent.CreateAgentRequest], gcdc_agent.Agent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_agent(self) -> Callable[[agent.DeleteAgentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_agent( + self, + ) -> Callable[[agent.ExportAgentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_agent(self) -> Callable[[agent.GetAgentRequest], agent.Agent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_agent_validation_result( + self, + ) -> Callable[[agent.GetAgentValidationResultRequest], agent.AgentValidationResult]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAgentValidationResult(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_generative_settings( + self, + ) -> Callable[ + [agent.GetGenerativeSettingsRequest], generative_settings.GenerativeSettings + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetGenerativeSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_agents( + self, + ) -> Callable[[agent.ListAgentsRequest], agent.ListAgentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAgents(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_agent( + self, + ) -> Callable[[agent.RestoreAgentRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_agent( + self, + ) -> Callable[[gcdc_agent.UpdateAgentRequest], gcdc_agent.Agent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_generative_settings( + self, + ) -> Callable[ + [agent.UpdateGenerativeSettingsRequest], + gcdc_generative_settings.GenerativeSettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGenerativeSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_agent( + self, + ) -> Callable[[agent.ValidateAgentRequest], agent.AgentValidationResult]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ValidateAgent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(AgentsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(AgentsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(AgentsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AgentsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AgentsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AgentsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/__init__.py new file mode 100644 index 000000000000..6a570679c781 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ChangelogsAsyncClient +from .client import ChangelogsClient + +__all__ = ( + "ChangelogsClient", + "ChangelogsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py new file mode 100644 index 000000000000..ae53e5b29289 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/async_client.py @@ -0,0 +1,716 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.changelogs import pagers +from google.cloud.dialogflowcx_v3.types import changelog + +from .client import ChangelogsClient +from .transports.base import DEFAULT_CLIENT_INFO, ChangelogsTransport +from .transports.grpc_asyncio import ChangelogsGrpcAsyncIOTransport + + +class ChangelogsAsyncClient: + """Service for managing + [Changelogs][google.cloud.dialogflow.cx.v3.Changelog]. 
+ """ + + _client: ChangelogsClient + + DEFAULT_ENDPOINT = ChangelogsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ChangelogsClient.DEFAULT_MTLS_ENDPOINT + + changelog_path = staticmethod(ChangelogsClient.changelog_path) + parse_changelog_path = staticmethod(ChangelogsClient.parse_changelog_path) + common_billing_account_path = staticmethod( + ChangelogsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ChangelogsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ChangelogsClient.common_folder_path) + parse_common_folder_path = staticmethod(ChangelogsClient.parse_common_folder_path) + common_organization_path = staticmethod(ChangelogsClient.common_organization_path) + parse_common_organization_path = staticmethod( + ChangelogsClient.parse_common_organization_path + ) + common_project_path = staticmethod(ChangelogsClient.common_project_path) + parse_common_project_path = staticmethod(ChangelogsClient.parse_common_project_path) + common_location_path = staticmethod(ChangelogsClient.common_location_path) + parse_common_location_path = staticmethod( + ChangelogsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ChangelogsAsyncClient: The constructed client. + """ + return ChangelogsClient.from_service_account_info.__func__(ChangelogsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ChangelogsAsyncClient: The constructed client. + """ + return ChangelogsClient.from_service_account_file.__func__(ChangelogsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return ChangelogsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ChangelogsTransport: + """Returns the transport used by the client instance. + + Returns: + ChangelogsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ChangelogsClient).get_transport_class, type(ChangelogsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ChangelogsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the changelogs client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ChangelogsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ChangelogsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_changelogs( + self, + request: Optional[Union[changelog.ListChangelogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChangelogsAsyncPager: + r"""Returns the list of Changelogs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_changelogs(): + # Create a client + client = dialogflowcx_v3.ChangelogsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListChangelogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_changelogs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListChangelogsRequest, dict]]): + The request object. The request message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + parent (:class:`str`): + Required. The agent containing the changelogs. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.changelogs.pagers.ListChangelogsAsyncPager: + The response message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = changelog.ListChangelogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_changelogs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChangelogsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_changelog( + self, + request: Optional[Union[changelog.GetChangelogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> changelog.Changelog: + r"""Retrieves the specified Changelog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_changelog(): + # Create a client + client = dialogflowcx_v3.ChangelogsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetChangelogRequest( + name="name_value", + ) + + # Make the request + response = await client.get_changelog(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetChangelogRequest, dict]]): + The request object. The request message for + [Changelogs.GetChangelog][google.cloud.dialogflow.cx.v3.Changelogs.GetChangelog]. + name (:class:`str`): + Required. The name of the changelog to get. Format: + ``projects//locations//agents//changelogs/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Changelog: + Changelogs represents a change made + to a given agent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = changelog.GetChangelogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_changelog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ChangelogsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ChangelogsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py new file mode 100644 index 000000000000..7b2e8af26194 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py @@ -0,0 +1,958 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.changelogs import pagers +from google.cloud.dialogflowcx_v3.types import changelog + +from .transports.base import DEFAULT_CLIENT_INFO, ChangelogsTransport +from .transports.grpc import ChangelogsGrpcTransport +from .transports.grpc_asyncio import ChangelogsGrpcAsyncIOTransport +from .transports.rest import ChangelogsRestTransport + + +class ChangelogsClientMeta(type): + """Metaclass for the Changelogs client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ChangelogsTransport]] + _transport_registry["grpc"] = ChangelogsGrpcTransport + _transport_registry["grpc_asyncio"] = ChangelogsGrpcAsyncIOTransport + _transport_registry["rest"] = ChangelogsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ChangelogsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ChangelogsClient(metaclass=ChangelogsClientMeta): + """Service for managing + [Changelogs][google.cloud.dialogflow.cx.v3.Changelog]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ChangelogsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ChangelogsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ChangelogsTransport: + """Returns the transport used by the client instance. + + Returns: + ChangelogsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def changelog_path( + project: str, + location: str, + agent: str, + changelog: str, + ) -> str: + """Returns a fully-qualified changelog string.""" + return "projects/{project}/locations/{location}/agents/{agent}/changelogs/{changelog}".format( + project=project, + location=location, + agent=agent, + changelog=changelog, + ) + + @staticmethod + def parse_changelog_path(path: str) -> Dict[str, str]: + """Parses a changelog path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/changelogs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + 
def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ChangelogsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the changelogs client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ChangelogsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ChangelogsTransport): + # transport is a ChangelogsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_changelogs( + self, + request: Optional[Union[changelog.ListChangelogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChangelogsPager: + r"""Returns the list of Changelogs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_changelogs(): + # Create a client + client = dialogflowcx_v3.ChangelogsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListChangelogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_changelogs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListChangelogsRequest, dict]): + The request object. The request message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + parent (str): + Required. The agent containing the changelogs. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.changelogs.pagers.ListChangelogsPager: + The response message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a changelog.ListChangelogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, changelog.ListChangelogsRequest): + request = changelog.ListChangelogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_changelogs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChangelogsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_changelog( + self, + request: Optional[Union[changelog.GetChangelogRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> changelog.Changelog: + r"""Retrieves the specified Changelog. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_changelog(): + # Create a client + client = dialogflowcx_v3.ChangelogsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetChangelogRequest( + name="name_value", + ) + + # Make the request + response = client.get_changelog(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetChangelogRequest, dict]): + The request object. The request message for + [Changelogs.GetChangelog][google.cloud.dialogflow.cx.v3.Changelogs.GetChangelog]. + name (str): + Required. The name of the changelog to get. Format: + ``projects//locations//agents//changelogs/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Changelog: + Changelogs represents a change made + to a given agent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a changelog.GetChangelogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, changelog.GetChangelogRequest): + request = changelog.GetChangelogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_changelog] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ChangelogsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ChangelogsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/pagers.py new file mode 100644 index 000000000000..555a566c2706 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import changelog + + +class ListChangelogsPager: + """A pager for iterating through ``list_changelogs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListChangelogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``changelogs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChangelogs`` requests and continue to iterate + through the ``changelogs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListChangelogsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., changelog.ListChangelogsResponse], + request: changelog.ListChangelogsRequest, + response: changelog.ListChangelogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListChangelogsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListChangelogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = changelog.ListChangelogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[changelog.ListChangelogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[changelog.Changelog]: + for page in self.pages: + yield from page.changelogs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChangelogsAsyncPager: + """A pager for iterating through ``list_changelogs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListChangelogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``changelogs`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListChangelogs`` requests and continue to iterate + through the ``changelogs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListChangelogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[changelog.ListChangelogsResponse]], + request: changelog.ListChangelogsRequest, + response: changelog.ListChangelogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListChangelogsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListChangelogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = changelog.ListChangelogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[changelog.ListChangelogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[changelog.Changelog]: + async def async_generator(): + async for page in self.pages: + for response in page.changelogs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/__init__.py new file mode 100644 index 000000000000..8db8f657549f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ChangelogsTransport +from .grpc import ChangelogsGrpcTransport +from .grpc_asyncio import ChangelogsGrpcAsyncIOTransport +from .rest import ChangelogsRestInterceptor, ChangelogsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ChangelogsTransport]] +_transport_registry["grpc"] = ChangelogsGrpcTransport +_transport_registry["grpc_asyncio"] = ChangelogsGrpcAsyncIOTransport +_transport_registry["rest"] = ChangelogsRestTransport + +__all__ = ( + "ChangelogsTransport", + "ChangelogsGrpcTransport", + "ChangelogsGrpcAsyncIOTransport", + "ChangelogsRestTransport", + "ChangelogsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/base.py new file mode 100644 index 000000000000..50949946372a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/base.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import changelog + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ChangelogsTransport(abc.ABC): + """Abstract transport class for Changelogs.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_changelogs: gapic_v1.method.wrap_method( + self.list_changelogs, + default_timeout=None, + client_info=client_info, + ), + self.get_changelog: gapic_v1.method.wrap_method( + self.get_changelog, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_changelogs( + self, + ) -> Callable[ + [changelog.ListChangelogsRequest], + Union[ + changelog.ListChangelogsResponse, + Awaitable[changelog.ListChangelogsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_changelog( + self, + ) -> Callable[ + [changelog.GetChangelogRequest], + Union[changelog.Changelog, Awaitable[changelog.Changelog]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + 
[locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ChangelogsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/grpc.py new file mode 100644 index 000000000000..2e3abd48b52d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/grpc.py @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import changelog + +from .base import DEFAULT_CLIENT_INFO, ChangelogsTransport + + +class ChangelogsGrpcTransport(ChangelogsTransport): + """gRPC backend transport for Changelogs. + + Service for managing + [Changelogs][google.cloud.dialogflow.cx.v3.Changelog]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_changelogs( + self, + ) -> Callable[[changelog.ListChangelogsRequest], changelog.ListChangelogsResponse]: + r"""Return a callable for the list changelogs method over gRPC. + + Returns the list of Changelogs. + + Returns: + Callable[[~.ListChangelogsRequest], + ~.ListChangelogsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_changelogs" not in self._stubs: + self._stubs["list_changelogs"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Changelogs/ListChangelogs", + request_serializer=changelog.ListChangelogsRequest.serialize, + response_deserializer=changelog.ListChangelogsResponse.deserialize, + ) + return self._stubs["list_changelogs"] + + @property + def get_changelog( + self, + ) -> Callable[[changelog.GetChangelogRequest], changelog.Changelog]: + r"""Return a callable for the get changelog method over gRPC. + + Retrieves the specified Changelog. + + Returns: + Callable[[~.GetChangelogRequest], + ~.Changelog]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_changelog" not in self._stubs: + self._stubs["get_changelog"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Changelogs/GetChangelog", + request_serializer=changelog.GetChangelogRequest.serialize, + response_deserializer=changelog.Changelog.deserialize, + ) + return self._stubs["get_changelog"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ChangelogsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c8cd5493ca7b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/grpc_asyncio.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import changelog + +from .base import DEFAULT_CLIENT_INFO, ChangelogsTransport +from .grpc import ChangelogsGrpcTransport + + +class ChangelogsGrpcAsyncIOTransport(ChangelogsTransport): + """gRPC AsyncIO backend transport for Changelogs. + + Service for managing + [Changelogs][google.cloud.dialogflow.cx.v3.Changelog]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_changelogs( + self, + ) -> Callable[ + [changelog.ListChangelogsRequest], Awaitable[changelog.ListChangelogsResponse] + ]: + r"""Return a callable for the list changelogs method over gRPC. + + Returns the list of Changelogs. + + Returns: + Callable[[~.ListChangelogsRequest], + Awaitable[~.ListChangelogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_changelogs" not in self._stubs: + self._stubs["list_changelogs"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Changelogs/ListChangelogs", + request_serializer=changelog.ListChangelogsRequest.serialize, + response_deserializer=changelog.ListChangelogsResponse.deserialize, + ) + return self._stubs["list_changelogs"] + + @property + def get_changelog( + self, + ) -> Callable[[changelog.GetChangelogRequest], Awaitable[changelog.Changelog]]: + r"""Return a callable for the get changelog method over gRPC. + + Retrieves the specified Changelog. + + Returns: + Callable[[~.GetChangelogRequest], + Awaitable[~.Changelog]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_changelog" not in self._stubs: + self._stubs["get_changelog"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Changelogs/GetChangelog", + request_serializer=changelog.GetChangelogRequest.serialize, + response_deserializer=changelog.Changelog.deserialize, + ) + return self._stubs["get_changelog"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("ChangelogsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py new file mode 100644 index 000000000000..ded9a198c71e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py @@ -0,0 +1,891 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import changelog + +from .base import ChangelogsTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ChangelogsRestInterceptor: + """Interceptor for Changelogs. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ChangelogsRestTransport. + + .. code-block:: python + class MyCustomChangelogsInterceptor(ChangelogsRestInterceptor): + def pre_get_changelog(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_changelog(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_changelogs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_changelogs(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ChangelogsRestTransport(interceptor=MyCustomChangelogsInterceptor()) + client = ChangelogsClient(transport=transport) + + + """ + + def pre_get_changelog( + self, + request: changelog.GetChangelogRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[changelog.GetChangelogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_changelog + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. + """ + return request, metadata + + def post_get_changelog(self, response: changelog.Changelog) -> changelog.Changelog: + """Post-rpc interceptor for get_changelog + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. 
+ """ + return response + + def pre_list_changelogs( + self, + request: changelog.ListChangelogsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[changelog.ListChangelogsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_changelogs + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. + """ + return request, metadata + + def post_list_changelogs( + self, response: changelog.ListChangelogsResponse + ) -> changelog.ListChangelogsResponse: + """Post-rpc interceptor for list_changelogs + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Changelogs server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Changelogs server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ChangelogsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ChangelogsRestInterceptor + + +class ChangelogsRestTransport(ChangelogsTransport): + """REST backend transport for Changelogs. + + Service for managing + [Changelogs][google.cloud.dialogflow.cx.v3.Changelog]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ChangelogsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ChangelogsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetChangelog(ChangelogsRestStub): + def __hash__(self): + return hash("GetChangelog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: changelog.GetChangelogRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> changelog.Changelog: + r"""Call the get changelog method over HTTP. + + Args: + request (~.changelog.GetChangelogRequest): + The request object. The request message for + [Changelogs.GetChangelog][google.cloud.dialogflow.cx.v3.Changelogs.GetChangelog]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.changelog.Changelog: + Changelogs represents a change made + to a given agent. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/changelogs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_changelog(request, metadata) + pb_request = changelog.GetChangelogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = changelog.Changelog() + pb_resp = changelog.Changelog.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_changelog(resp) + return resp + + class _ListChangelogs(ChangelogsRestStub): + def __hash__(self): + return hash("ListChangelogs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: changelog.ListChangelogsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> changelog.ListChangelogsResponse: + r"""Call the list changelogs method over HTTP. + + Args: + request (~.changelog.ListChangelogsRequest): + The request object. The request message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.changelog.ListChangelogsResponse: + The response message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/changelogs", + }, + ] + request, metadata = self._interceptor.pre_list_changelogs(request, metadata) + pb_request = changelog.ListChangelogsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = changelog.ListChangelogsResponse() + pb_resp = changelog.ListChangelogsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_changelogs(resp) + return resp + + @property + def get_changelog( + self, + ) -> Callable[[changelog.GetChangelogRequest], changelog.Changelog]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetChangelog(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_changelogs( + self, + ) -> Callable[[changelog.ListChangelogsRequest], changelog.ListChangelogsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChangelogs(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ChangelogsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ChangelogsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ChangelogsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ChangelogsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ChangelogsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ChangelogsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/__init__.py new file mode 100644 index 000000000000..0dac6b8818be --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import DeploymentsAsyncClient +from .client import DeploymentsClient + +__all__ = ( + "DeploymentsClient", + "DeploymentsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py new file mode 100644 index 000000000000..2aef46f28891 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/async_client.py @@ -0,0 +1,737 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.deployments import pagers +from google.cloud.dialogflowcx_v3.types import deployment + +from .client import DeploymentsClient +from .transports.base import DEFAULT_CLIENT_INFO, DeploymentsTransport +from .transports.grpc_asyncio import DeploymentsGrpcAsyncIOTransport + + +class DeploymentsAsyncClient: + """Service for managing + [Deployments][google.cloud.dialogflow.cx.v3.Deployment]. 
+ """ + + _client: DeploymentsClient + + DEFAULT_ENDPOINT = DeploymentsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DeploymentsClient.DEFAULT_MTLS_ENDPOINT + + deployment_path = staticmethod(DeploymentsClient.deployment_path) + parse_deployment_path = staticmethod(DeploymentsClient.parse_deployment_path) + experiment_path = staticmethod(DeploymentsClient.experiment_path) + parse_experiment_path = staticmethod(DeploymentsClient.parse_experiment_path) + test_case_result_path = staticmethod(DeploymentsClient.test_case_result_path) + parse_test_case_result_path = staticmethod( + DeploymentsClient.parse_test_case_result_path + ) + version_path = staticmethod(DeploymentsClient.version_path) + parse_version_path = staticmethod(DeploymentsClient.parse_version_path) + common_billing_account_path = staticmethod( + DeploymentsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DeploymentsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DeploymentsClient.common_folder_path) + parse_common_folder_path = staticmethod(DeploymentsClient.parse_common_folder_path) + common_organization_path = staticmethod(DeploymentsClient.common_organization_path) + parse_common_organization_path = staticmethod( + DeploymentsClient.parse_common_organization_path + ) + common_project_path = staticmethod(DeploymentsClient.common_project_path) + parse_common_project_path = staticmethod( + DeploymentsClient.parse_common_project_path + ) + common_location_path = staticmethod(DeploymentsClient.common_location_path) + parse_common_location_path = staticmethod( + DeploymentsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + DeploymentsAsyncClient: The constructed client. + """ + return DeploymentsClient.from_service_account_info.__func__(DeploymentsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeploymentsAsyncClient: The constructed client. + """ + return DeploymentsClient.from_service_account_file.__func__(DeploymentsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DeploymentsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DeploymentsTransport: + """Returns the transport used by the client instance. + + Returns: + DeploymentsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DeploymentsClient).get_transport_class, type(DeploymentsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DeploymentsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the deployments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DeploymentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DeploymentsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_deployments( + self, + request: Optional[Union[deployment.ListDeploymentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentsAsyncPager: + r"""Returns the list of all deployments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_deployments(): + # Create a client + client = dialogflowcx_v3.DeploymentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListDeploymentsRequest, dict]]): + The request object. The request message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + parent (:class:`str`): + Required. The + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to list all environments for. Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.deployments.pagers.ListDeploymentsAsyncPager: + The response message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment.ListDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deployments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deployment( + self, + request: Optional[Union[deployment.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> deployment.Deployment: + r"""Retrieves the specified + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_deployment(): + # Create a client + client = dialogflowcx_v3.DeploymentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetDeploymentRequest, dict]]): + The request object. The request message for + [Deployments.GetDeployment][google.cloud.dialogflow.cx.v3.Deployments.GetDeployment]. + name (:class:`str`): + Required. The name of the + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + Format: + ``projects//locations//agents//environments//deployments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Deployment: + Represents a deployment in an + environment. A deployment happens when a + flow version configured to be active in + the environment. You can configure + running pre-deployment steps, e.g. + running validation test cases, + experiment auto-rollout, etc. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = deployment.GetDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "DeploymentsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DeploymentsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py new file mode 100644 index 000000000000..0a82ec3d682d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py @@ -0,0 +1,1049 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.deployments import pagers +from google.cloud.dialogflowcx_v3.types import deployment + +from .transports.base import DEFAULT_CLIENT_INFO, DeploymentsTransport +from .transports.grpc import DeploymentsGrpcTransport +from .transports.grpc_asyncio import DeploymentsGrpcAsyncIOTransport +from .transports.rest import DeploymentsRestTransport + + +class DeploymentsClientMeta(type): + """Metaclass for the Deployments client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[DeploymentsTransport]] + _transport_registry["grpc"] = DeploymentsGrpcTransport + _transport_registry["grpc_asyncio"] = DeploymentsGrpcAsyncIOTransport + _transport_registry["rest"] = DeploymentsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DeploymentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DeploymentsClient(metaclass=DeploymentsClientMeta): + """Service for managing + [Deployments][google.cloud.dialogflow.cx.v3.Deployment]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeploymentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeploymentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DeploymentsTransport: + """Returns the transport used by the client instance. + + Returns: + DeploymentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def deployment_path( + project: str, + location: str, + agent: str, + environment: str, + deployment: str, + ) -> str: + """Returns a fully-qualified deployment string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/deployments/{deployment}".format( + project=project, + location=location, + agent=agent, + environment=environment, + deployment=deployment, + ) + + @staticmethod + def parse_deployment_path(path: str) -> Dict[str, str]: + """Parses a deployment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)/deployments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def experiment_path( + project: str, + location: str, + agent: str, + environment: str, + experiment: str, + ) -> str: + """Returns a fully-qualified experiment string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/experiments/{experiment}".format( + project=project, + location=location, + agent=agent, + environment=environment, + experiment=experiment, + ) + + @staticmethod + def parse_experiment_path(path: str) -> Dict[str, str]: + """Parses a experiment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)/experiments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def test_case_result_path( + project: str, + location: str, + agent: str, + test_case: str, + result: str, + ) -> str: + """Returns a fully-qualified test_case_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/testCases/{test_case}/results/{result}".format( + project=project, + location=location, + agent=agent, + test_case=test_case, + result=result, + ) + + @staticmethod + def parse_test_case_result_path(path: str) -> Dict[str, str]: + """Parses a 
test_case_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/testCases/(?P.+?)/results/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def version_path( + project: str, + location: str, + agent: str, + flow: str, + version: str, + ) -> str: + """Returns a fully-qualified version string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/versions/{version}".format( + project=project, + location=location, + agent=agent, + flow=flow, + version=version, + ) + + @staticmethod + def parse_version_path(path: str) -> Dict[str, str]: + """Parses a version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/versions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def 
parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DeploymentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the deployments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DeploymentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DeploymentsTransport): + # transport is a DeploymentsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_deployments( + self, + request: Optional[Union[deployment.ListDeploymentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeploymentsPager: + r"""Returns the list of all deployments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_deployments(): + # Create a client + client = dialogflowcx_v3.DeploymentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListDeploymentsRequest, dict]): + The request object. The request message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + parent (str): + Required. The + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to list all environments for. Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.deployments.pagers.ListDeploymentsPager: + The response message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment.ListDeploymentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, deployment.ListDeploymentsRequest): + request = deployment.ListDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deployments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deployment( + self, + request: Optional[Union[deployment.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> deployment.Deployment: + r"""Retrieves the specified + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_deployment(): + # Create a client + client = dialogflowcx_v3.DeploymentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetDeploymentRequest, dict]): + The request object. The request message for + [Deployments.GetDeployment][google.cloud.dialogflow.cx.v3.Deployments.GetDeployment]. + name (str): + Required. The name of the + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + Format: + ``projects//locations//agents//environments//deployments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Deployment: + Represents a deployment in an + environment. A deployment happens when a + flow version configured to be active in + the environment. You can configure + running pre-deployment steps, e.g. + running validation test cases, + experiment auto-rollout, etc. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a deployment.GetDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, deployment.GetDeploymentRequest): + request = deployment.GetDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DeploymentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DeploymentsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/pagers.py new file mode 100644 index 000000000000..f3350d25842b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import deployment + + +class ListDeploymentsPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., deployment.ListDeploymentsResponse], + request: deployment.ListDeploymentsRequest, + response: deployment.ListDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListDeploymentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = deployment.ListDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[deployment.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[deployment.Deployment]: + for page in self.pages: + yield from page.deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentsAsyncPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListDeploymentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[deployment.ListDeploymentsResponse]], + request: deployment.ListDeploymentsRequest, + response: deployment.ListDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListDeploymentsRequest): + The initial request object. 
+ response (google.cloud.dialogflowcx_v3.types.ListDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = deployment.ListDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[deployment.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[deployment.Deployment]: + async def async_generator(): + async for page in self.pages: + for response in page.deployments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/__init__.py new file mode 100644 index 000000000000..da2da74bdc29 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DeploymentsTransport +from .grpc import DeploymentsGrpcTransport +from .grpc_asyncio import DeploymentsGrpcAsyncIOTransport +from .rest import DeploymentsRestInterceptor, DeploymentsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DeploymentsTransport]] +_transport_registry["grpc"] = DeploymentsGrpcTransport +_transport_registry["grpc_asyncio"] = DeploymentsGrpcAsyncIOTransport +_transport_registry["rest"] = DeploymentsRestTransport + +__all__ = ( + "DeploymentsTransport", + "DeploymentsGrpcTransport", + "DeploymentsGrpcAsyncIOTransport", + "DeploymentsRestTransport", + "DeploymentsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/base.py new file mode 100644 index 000000000000..0e01fec9cdc0 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/base.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import deployment + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DeploymentsTransport(abc.ABC): + """Abstract transport class for Deployments.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_deployments: gapic_v1.method.wrap_method( + self.list_deployments, + default_timeout=None, + client_info=client_info, + ), + self.get_deployment: gapic_v1.method.wrap_method( + self.get_deployment, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_deployments( + self, + ) -> Callable[ + [deployment.ListDeploymentsRequest], + Union[ + deployment.ListDeploymentsResponse, + Awaitable[deployment.ListDeploymentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_deployment( + self, + ) -> Callable[ + [deployment.GetDeploymentRequest], + Union[deployment.Deployment, Awaitable[deployment.Deployment]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DeploymentsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/grpc.py new file mode 100644 index 000000000000..8e598bd41aba --- /dev/null +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/grpc.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import deployment + +from .base import DEFAULT_CLIENT_INFO, DeploymentsTransport + + +class DeploymentsGrpcTransport(DeploymentsTransport): + """gRPC backend transport for Deployments. + + Service for managing + [Deployments][google.cloud.dialogflow.cx.v3.Deployment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_deployments( + self, + ) -> Callable[ + [deployment.ListDeploymentsRequest], deployment.ListDeploymentsResponse + ]: + r"""Return a callable for the list deployments method over gRPC. + + Returns the list of all deployments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.ListDeploymentsRequest], + ~.ListDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Deployments/ListDeployments", + request_serializer=deployment.ListDeploymentsRequest.serialize, + response_deserializer=deployment.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def get_deployment( + self, + ) -> Callable[[deployment.GetDeploymentRequest], deployment.Deployment]: + r"""Return a callable for the get deployment method over gRPC. + + Retrieves the specified + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + + Returns: + Callable[[~.GetDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Deployments/GetDeployment", + request_serializer=deployment.GetDeploymentRequest.serialize, + response_deserializer=deployment.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DeploymentsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/grpc_asyncio.py new file mode 100644 index 000000000000..dad794bf6799 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/grpc_asyncio.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import deployment + +from .base import DEFAULT_CLIENT_INFO, DeploymentsTransport +from .grpc import DeploymentsGrpcTransport + + +class DeploymentsGrpcAsyncIOTransport(DeploymentsTransport): + """gRPC AsyncIO backend transport for Deployments. + + Service for managing + [Deployments][google.cloud.dialogflow.cx.v3.Deployment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_deployments( + self, + ) -> Callable[ + [deployment.ListDeploymentsRequest], + Awaitable[deployment.ListDeploymentsResponse], + ]: + r"""Return a callable for the list deployments method over gRPC. + + Returns the list of all deployments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.ListDeploymentsRequest], + Awaitable[~.ListDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Deployments/ListDeployments", + request_serializer=deployment.ListDeploymentsRequest.serialize, + response_deserializer=deployment.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def get_deployment( + self, + ) -> Callable[[deployment.GetDeploymentRequest], Awaitable[deployment.Deployment]]: + r"""Return a callable for the get deployment method over gRPC. + + Retrieves the specified + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. 
+ + Returns: + Callable[[~.GetDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Deployments/GetDeployment", + request_serializer=deployment.GetDeploymentRequest.serialize, + response_deserializer=deployment.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("DeploymentsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py new file mode 100644 index 000000000000..b0a59be1734b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py @@ -0,0 +1,902 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import deployment + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DeploymentsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DeploymentsRestInterceptor: + """Interceptor for Deployments. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DeploymentsRestTransport. + + .. code-block:: python + class MyCustomDeploymentsInterceptor(DeploymentsRestInterceptor): + def pre_get_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DeploymentsRestTransport(interceptor=MyCustomDeploymentsInterceptor()) + client = DeploymentsClient(transport=transport) + + + """ + + def pre_get_deployment( + self, + request: deployment.GetDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[deployment.GetDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. + """ + return request, metadata + + def post_get_deployment( + self, response: deployment.Deployment + ) -> deployment.Deployment: + """Post-rpc interceptor for get_deployment + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. 
+ """ + return response + + def pre_list_deployments( + self, + request: deployment.ListDeploymentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[deployment.ListDeploymentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. + """ + return request, metadata + + def post_list_deployments( + self, response: deployment.ListDeploymentsResponse + ) -> deployment.ListDeploymentsResponse: + """Post-rpc interceptor for list_deployments + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Deployments server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Deployments server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DeploymentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DeploymentsRestInterceptor + + +class DeploymentsRestTransport(DeploymentsTransport): + """REST backend transport for Deployments. + + Service for managing + [Deployments][google.cloud.dialogflow.cx.v3.Deployment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DeploymentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DeploymentsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetDeployment(DeploymentsRestStub): + def __hash__(self): + return hash("GetDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: deployment.GetDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> deployment.Deployment: + r"""Call the get deployment method over HTTP. + + Args: + request (~.deployment.GetDeploymentRequest): + The request object. The request message for + [Deployments.GetDeployment][google.cloud.dialogflow.cx.v3.Deployments.GetDeployment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.deployment.Deployment: + Represents a deployment in an + environment. A deployment happens when a + flow version configured to be active in + the environment. You can configure + running pre-deployment steps, e.g. + running validation test cases, + experiment auto-rollout, etc. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deployment(request, metadata) + pb_request = deployment.GetDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = deployment.Deployment() + pb_resp = deployment.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deployment(resp) + return resp + + class _ListDeployments(DeploymentsRestStub): + def __hash__(self): + return hash("ListDeployments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: deployment.ListDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> deployment.ListDeploymentsResponse: + r"""Call the list deployments method over HTTP. + + Args: + request (~.deployment.ListDeploymentsRequest): + The request object. The request message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.deployment.ListDeploymentsResponse: + The response message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/environments/*}/deployments", + }, + ] + request, metadata = self._interceptor.pre_list_deployments( + request, metadata + ) + pb_request = deployment.ListDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = deployment.ListDeploymentsResponse() + pb_resp = deployment.ListDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deployments(resp) + return resp + + @property + def get_deployment( + self, + ) -> Callable[[deployment.GetDeploymentRequest], deployment.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deployments( + self, + ) -> Callable[ + [deployment.ListDeploymentsRequest], deployment.ListDeploymentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DeploymentsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DeploymentsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DeploymentsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DeploymentsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DeploymentsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DeploymentsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/__init__.py new file mode 100644 index 000000000000..0701a34b640e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import EntityTypesAsyncClient +from .client import EntityTypesClient + +__all__ = ( + "EntityTypesClient", + "EntityTypesAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py new file mode 100644 index 000000000000..fc17681df224 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/async_client.py @@ -0,0 +1,1150 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.entity_types import pagers +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type + +from .client import EntityTypesClient +from .transports.base import DEFAULT_CLIENT_INFO, EntityTypesTransport +from .transports.grpc_asyncio import EntityTypesGrpcAsyncIOTransport + + +class EntityTypesAsyncClient: + """Service for managing + [EntityTypes][google.cloud.dialogflow.cx.v3.EntityType]. 
+ """ + + _client: EntityTypesClient + + DEFAULT_ENDPOINT = EntityTypesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EntityTypesClient.DEFAULT_MTLS_ENDPOINT + + entity_type_path = staticmethod(EntityTypesClient.entity_type_path) + parse_entity_type_path = staticmethod(EntityTypesClient.parse_entity_type_path) + common_billing_account_path = staticmethod( + EntityTypesClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EntityTypesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(EntityTypesClient.common_folder_path) + parse_common_folder_path = staticmethod(EntityTypesClient.parse_common_folder_path) + common_organization_path = staticmethod(EntityTypesClient.common_organization_path) + parse_common_organization_path = staticmethod( + EntityTypesClient.parse_common_organization_path + ) + common_project_path = staticmethod(EntityTypesClient.common_project_path) + parse_common_project_path = staticmethod( + EntityTypesClient.parse_common_project_path + ) + common_location_path = staticmethod(EntityTypesClient.common_location_path) + parse_common_location_path = staticmethod( + EntityTypesClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntityTypesAsyncClient: The constructed client. + """ + return EntityTypesClient.from_service_account_info.__func__(EntityTypesAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntityTypesAsyncClient: The constructed client. + """ + return EntityTypesClient.from_service_account_file.__func__(EntityTypesAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return EntityTypesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EntityTypesTransport: + """Returns the transport used by the client instance. + + Returns: + EntityTypesTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(EntityTypesClient).get_transport_class, type(EntityTypesClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, EntityTypesTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the entity types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.EntityTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = EntityTypesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_entity_types( + self, + request: Optional[Union[entity_type.ListEntityTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntityTypesAsyncPager: + r"""Returns the list of all entity types in the specified + agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_entity_types(): + # Create a client + client = dialogflowcx_v3.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListEntityTypesRequest, dict]]): + The request object. The request message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + parent (:class:`str`): + Required. The agent to list all entity types for. + Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.entity_types.pagers.ListEntityTypesAsyncPager: + The response message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = entity_type.ListEntityTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_entity_types, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntityTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entity_type( + self, + request: Optional[Union[entity_type.GetEntityTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_type.EntityType: + r"""Retrieves the specified entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetEntityTypeRequest, dict]]): + The request object. The request message for + [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.GetEntityType]. + name (:class:`str`): + Required. The name of the entity type. Format: + ``projects//locations//agents//entityTypes/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.EntityType: + Entities are extracted from user input and represent parameters that are + meaningful to your application. For example, a date + range, a proper name such as a geographic location or + landmark, and so on. Entities represent actionable + data for your application. + + When you define an entity, you can also include + synonyms that all map to that entity. For example, + "soft drink", "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. 
A system entity is + represented by the EntityType type. + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to + your application. For example, you could define a + pizza.sauce entity for red or white pizza sauce, a + pizza.cheese entity for the different types of + cheese on a pizza, a pizza.topping entity for + different toppings, and so on. A custom entity is + represented by the EntityType type. + - **User** - entities that are built for an + individual user such as favorites, preferences, + playlists, and so on. A user entity is represented + by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = entity_type.GetEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entity_type( + self, + request: Optional[Union[gcdc_entity_type.CreateEntityTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + entity_type: Optional[gcdc_entity_type.EntityType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_entity_type.EntityType: + r"""Creates an entity type in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesAsyncClient() + + # Initialize request argument(s) + entity_type = dialogflowcx_v3.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflowcx_v3.CreateEntityTypeRequest( + parent="parent_value", + entity_type=entity_type, + ) + + # Make the request + response = await client.create_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateEntityTypeRequest, dict]]): + The request object. The request message for + [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.CreateEntityType]. + parent (:class:`str`): + Required. 
The agent to create a entity type for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_type (:class:`google.cloud.dialogflowcx_v3.types.EntityType`): + Required. The entity type to create. + This corresponds to the ``entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.EntityType: + Entities are extracted from user input and represent parameters that are + meaningful to your application. For example, a date + range, a proper name such as a geographic location or + landmark, and so on. Entities represent actionable + data for your application. + + When you define an entity, you can also include + synonyms that all map to that entity. For example, + "soft drink", "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the EntityType type. + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to + your application. For example, you could define a + pizza.sauce entity for red or white pizza sauce, a + pizza.cheese entity for the different types of + cheese on a pizza, a pizza.topping entity for + different toppings, and so on. A custom entity is + represented by the EntityType type. + - **User** - entities that are built for an + individual user such as favorites, preferences, + playlists, and so on. 
A user entity is represented + by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_entity_type.CreateEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity_type is not None: + request.entity_type = entity_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_entity_type( + self, + request: Optional[Union[gcdc_entity_type.UpdateEntityTypeRequest, dict]] = None, + *, + entity_type: Optional[gcdc_entity_type.EntityType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_entity_type.EntityType: + r"""Updates the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesAsyncClient() + + # Initialize request argument(s) + entity_type = dialogflowcx_v3.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflowcx_v3.UpdateEntityTypeRequest( + entity_type=entity_type, + ) + + # Make the request + response = await client.update_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateEntityTypeRequest, dict]]): + The request object. The request message for + [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.UpdateEntityType]. + entity_type (:class:`google.cloud.dialogflowcx_v3.types.EntityType`): + Required. The entity type to update. 
+ This corresponds to the ``entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.EntityType: + Entities are extracted from user input and represent parameters that are + meaningful to your application. For example, a date + range, a proper name such as a geographic location or + landmark, and so on. Entities represent actionable + data for your application. + + When you define an entity, you can also include + synonyms that all map to that entity. For example, + "soft drink", "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the EntityType type. + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to + your application. For example, you could define a + pizza.sauce entity for red or white pizza sauce, a + pizza.cheese entity for the different types of + cheese on a pizza, a pizza.topping entity for + different toppings, and so on. A custom entity is + represented by the EntityType type. + - **User** - entities that are built for an + individual user such as favorites, preferences, + playlists, and so on. A user entity is represented + by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. 
+ + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entity_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_entity_type.UpdateEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entity_type is not None: + request.entity_type = entity_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("entity_type.name", request.entity_type.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entity_type( + self, + request: Optional[Union[entity_type.DeleteEntityTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteEntityTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_entity_type(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteEntityTypeRequest, dict]]): + The request object. The request message for + [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.DeleteEntityType]. + name (:class:`str`): + Required. The name of the entity type to delete. Format: + ``projects//locations//agents//entityTypes/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = entity_type.DeleteEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "EntityTypesAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EntityTypesAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py new file mode 100644 index 000000000000..e4bd256ff728 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py @@ -0,0 +1,1390 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.entity_types import pagers +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type + +from .transports.base import DEFAULT_CLIENT_INFO, EntityTypesTransport +from .transports.grpc import EntityTypesGrpcTransport +from .transports.grpc_asyncio import EntityTypesGrpcAsyncIOTransport +from .transports.rest import EntityTypesRestTransport + + +class EntityTypesClientMeta(type): + """Metaclass for the EntityTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[EntityTypesTransport]] + _transport_registry["grpc"] = EntityTypesGrpcTransport + _transport_registry["grpc_asyncio"] = EntityTypesGrpcAsyncIOTransport + _transport_registry["rest"] = EntityTypesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EntityTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EntityTypesClient(metaclass=EntityTypesClientMeta): + """Service for managing + [EntityTypes][google.cloud.dialogflow.cx.v3.EntityType]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntityTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntityTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EntityTypesTransport: + """Returns the transport used by the client instance. + + Returns: + EntityTypesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def entity_type_path( + project: str, + location: str, + agent: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + entity_type=entity_type, + ) + + @staticmethod + def parse_entity_type_path(path: str) -> Dict[str, str]: + """Parses a entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + 
+ @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, EntityTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the entity types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, EntityTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, EntityTypesTransport): + # transport is a EntityTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_entity_types( + self, + request: Optional[Union[entity_type.ListEntityTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntityTypesPager: + r"""Returns the list of all entity types in the specified + agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_entity_types(): + # Create a client + client = dialogflowcx_v3.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListEntityTypesRequest, dict]): + The request object. The request message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + parent (str): + Required. The agent to list all entity types for. + Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.entity_types.pagers.ListEntityTypesPager: + The response message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a entity_type.ListEntityTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, entity_type.ListEntityTypesRequest): + request = entity_type.ListEntityTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entity_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntityTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entity_type( + self, + request: Optional[Union[entity_type.GetEntityTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_type.EntityType: + r"""Retrieves the specified entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetEntityTypeRequest, dict]): + The request object. The request message for + [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.GetEntityType]. + name (str): + Required. The name of the entity type. Format: + ``projects//locations//agents//entityTypes/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.EntityType: + Entities are extracted from user input and represent parameters that are + meaningful to your application. For example, a date + range, a proper name such as a geographic location or + landmark, and so on. Entities represent actionable + data for your application. + + When you define an entity, you can also include + synonyms that all map to that entity. For example, + "soft drink", "soda", "pop", and so on. 
+ + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the EntityType type. + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to + your application. For example, you could define a + pizza.sauce entity for red or white pizza sauce, a + pizza.cheese entity for the different types of + cheese on a pizza, a pizza.topping entity for + different toppings, and so on. A custom entity is + represented by the EntityType type. + - **User** - entities that are built for an + individual user such as favorites, preferences, + playlists, and so on. A user entity is represented + by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a entity_type.GetEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, entity_type.GetEntityTypeRequest): + request = entity_type.GetEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_entity_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_entity_type( + self, + request: Optional[Union[gcdc_entity_type.CreateEntityTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + entity_type: Optional[gcdc_entity_type.EntityType] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_entity_type.EntityType: + r"""Creates an entity type in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesClient() + + # Initialize request argument(s) + entity_type = dialogflowcx_v3.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflowcx_v3.CreateEntityTypeRequest( + parent="parent_value", + entity_type=entity_type, + ) + + # Make the request + response = client.create_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateEntityTypeRequest, dict]): + The request object. The request message for + [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.CreateEntityType]. + parent (str): + Required. The agent to create a entity type for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_type (google.cloud.dialogflowcx_v3.types.EntityType): + Required. The entity type to create. + This corresponds to the ``entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.EntityType: + Entities are extracted from user input and represent parameters that are + meaningful to your application. For example, a date + range, a proper name such as a geographic location or + landmark, and so on. 
Entities represent actionable + data for your application. + + When you define an entity, you can also include + synonyms that all map to that entity. For example, + "soft drink", "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the EntityType type. + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to + your application. For example, you could define a + pizza.sauce entity for red or white pizza sauce, a + pizza.cheese entity for the different types of + cheese on a pizza, a pizza.topping entity for + different toppings, and so on. A custom entity is + represented by the EntityType type. + - **User** - entities that are built for an + individual user such as favorites, preferences, + playlists, and so on. A user entity is represented + by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_entity_type.CreateEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, gcdc_entity_type.CreateEntityTypeRequest): + request = gcdc_entity_type.CreateEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity_type is not None: + request.entity_type = entity_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entity_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entity_type( + self, + request: Optional[Union[gcdc_entity_type.UpdateEntityTypeRequest, dict]] = None, + *, + entity_type: Optional[gcdc_entity_type.EntityType] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_entity_type.EntityType: + r"""Updates the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesClient() + + # Initialize request argument(s) + entity_type = dialogflowcx_v3.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflowcx_v3.UpdateEntityTypeRequest( + entity_type=entity_type, + ) + + # Make the request + response = client.update_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateEntityTypeRequest, dict]): + The request object. The request message for + [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.UpdateEntityType]. + entity_type (google.cloud.dialogflowcx_v3.types.EntityType): + Required. The entity type to update. + This corresponds to the ``entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.EntityType: + Entities are extracted from user input and represent parameters that are + meaningful to your application. For example, a date + range, a proper name such as a geographic location or + landmark, and so on. Entities represent actionable + data for your application. 
+ + When you define an entity, you can also include + synonyms that all map to that entity. For example, + "soft drink", "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the EntityType type. + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to + your application. For example, you could define a + pizza.sauce entity for red or white pizza sauce, a + pizza.cheese entity for the different types of + cheese on a pizza, a pizza.topping entity for + different toppings, and so on. A custom entity is + represented by the EntityType type. + - **User** - entities that are built for an + individual user such as favorites, preferences, + playlists, and so on. A user entity is represented + by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entity_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_entity_type.UpdateEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, gcdc_entity_type.UpdateEntityTypeRequest): + request = gcdc_entity_type.UpdateEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entity_type is not None: + request.entity_type = entity_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entity_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("entity_type.name", request.entity_type.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entity_type( + self, + request: Optional[Union[entity_type.DeleteEntityTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_entity_type(): + # Create a client + client = dialogflowcx_v3.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteEntityTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_entity_type(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteEntityTypeRequest, dict]): + The request object. The request message for + [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.DeleteEntityType]. + name (str): + Required. The name of the entity type to delete. Format: + ``projects//locations//agents//entityTypes/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a entity_type.DeleteEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, entity_type.DeleteEntityTypeRequest): + request = entity_type.DeleteEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entity_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "EntityTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EntityTypesClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/pagers.py new file mode 100644 index 000000000000..7e2b4ddadb84 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import entity_type + + +class ListEntityTypesPager: + """A pager for iterating through ``list_entity_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListEntityTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entity_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntityTypes`` requests and continue to iterate + through the ``entity_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListEntityTypesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., entity_type.ListEntityTypesResponse], + request: entity_type.ListEntityTypesRequest, + response: entity_type.ListEntityTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListEntityTypesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListEntityTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = entity_type.ListEntityTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[entity_type.ListEntityTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[entity_type.EntityType]: + for page in self.pages: + yield from page.entity_types + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEntityTypesAsyncPager: + """A pager for iterating through ``list_entity_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListEntityTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entity_types`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntityTypes`` requests and continue to iterate + through the ``entity_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListEntityTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[entity_type.ListEntityTypesResponse]], + request: entity_type.ListEntityTypesRequest, + response: entity_type.ListEntityTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListEntityTypesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListEntityTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = entity_type.ListEntityTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[entity_type.ListEntityTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[entity_type.EntityType]: + async def async_generator(): + async for page in self.pages: + for response in page.entity_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/__init__.py new file mode 100644 index 000000000000..b8633c2ec7c7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import EntityTypesTransport +from .grpc import EntityTypesGrpcTransport +from .grpc_asyncio import EntityTypesGrpcAsyncIOTransport +from .rest import EntityTypesRestInterceptor, EntityTypesRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[EntityTypesTransport]] +_transport_registry["grpc"] = EntityTypesGrpcTransport +_transport_registry["grpc_asyncio"] = EntityTypesGrpcAsyncIOTransport +_transport_registry["rest"] = EntityTypesRestTransport + +__all__ = ( + "EntityTypesTransport", + "EntityTypesGrpcTransport", + "EntityTypesGrpcAsyncIOTransport", + "EntityTypesRestTransport", + "EntityTypesRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/base.py new file mode 100644 index 000000000000..3ca1f90c92e4 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/base.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class EntityTypesTransport(abc.ABC): + """Abstract transport class for EntityTypes.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_entity_types: gapic_v1.method.wrap_method( + self.list_entity_types, + default_timeout=None, + client_info=client_info, + ), + self.get_entity_type: gapic_v1.method.wrap_method( + self.get_entity_type, + default_timeout=None, + client_info=client_info, + ), + self.create_entity_type: gapic_v1.method.wrap_method( + self.create_entity_type, + default_timeout=None, + client_info=client_info, + ), + self.update_entity_type: gapic_v1.method.wrap_method( + self.update_entity_type, + default_timeout=None, + client_info=client_info, + ), + self.delete_entity_type: gapic_v1.method.wrap_method( + self.delete_entity_type, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_entity_types( + self, + ) -> Callable[ + [entity_type.ListEntityTypesRequest], + Union[ + entity_type.ListEntityTypesResponse, + Awaitable[entity_type.ListEntityTypesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_entity_type( + self, + ) -> Callable[ + [entity_type.GetEntityTypeRequest], + Union[entity_type.EntityType, Awaitable[entity_type.EntityType]], + ]: + raise NotImplementedError() + + @property + def create_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.CreateEntityTypeRequest], + Union[gcdc_entity_type.EntityType, Awaitable[gcdc_entity_type.EntityType]], + ]: + raise NotImplementedError() + + @property + def update_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.UpdateEntityTypeRequest], + Union[gcdc_entity_type.EntityType, Awaitable[gcdc_entity_type.EntityType]], + ]: + raise NotImplementedError() + + @property + def delete_entity_type( + self, + ) -> Callable[ + [entity_type.DeleteEntityTypeRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + 
[locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("EntityTypesTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/grpc.py new file mode 100644 index 000000000000..693f1cd40742 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/grpc.py @@ -0,0 +1,482 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type + +from .base import DEFAULT_CLIENT_INFO, EntityTypesTransport + + +class EntityTypesGrpcTransport(EntityTypesTransport): + """gRPC backend transport for EntityTypes. + + Service for managing + [EntityTypes][google.cloud.dialogflow.cx.v3.EntityType]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_entity_types( + self, + ) -> Callable[ + [entity_type.ListEntityTypesRequest], entity_type.ListEntityTypesResponse + ]: + r"""Return a callable for the list entity types method over gRPC. + + Returns the list of all entity types in the specified + agent. + + Returns: + Callable[[~.ListEntityTypesRequest], + ~.ListEntityTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_entity_types" not in self._stubs: + self._stubs["list_entity_types"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/ListEntityTypes", + request_serializer=entity_type.ListEntityTypesRequest.serialize, + response_deserializer=entity_type.ListEntityTypesResponse.deserialize, + ) + return self._stubs["list_entity_types"] + + @property + def get_entity_type( + self, + ) -> Callable[[entity_type.GetEntityTypeRequest], entity_type.EntityType]: + r"""Return a callable for the get entity type method over gRPC. + + Retrieves the specified entity type. + + Returns: + Callable[[~.GetEntityTypeRequest], + ~.EntityType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_entity_type" not in self._stubs: + self._stubs["get_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/GetEntityType", + request_serializer=entity_type.GetEntityTypeRequest.serialize, + response_deserializer=entity_type.EntityType.deserialize, + ) + return self._stubs["get_entity_type"] + + @property + def create_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.CreateEntityTypeRequest], gcdc_entity_type.EntityType + ]: + r"""Return a callable for the create entity type method over gRPC. + + Creates an entity type in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateEntityTypeRequest], + ~.EntityType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_entity_type" not in self._stubs: + self._stubs["create_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/CreateEntityType", + request_serializer=gcdc_entity_type.CreateEntityTypeRequest.serialize, + response_deserializer=gcdc_entity_type.EntityType.deserialize, + ) + return self._stubs["create_entity_type"] + + @property + def update_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.UpdateEntityTypeRequest], gcdc_entity_type.EntityType + ]: + r"""Return a callable for the update entity type method over gRPC. + + Updates the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateEntityTypeRequest], + ~.EntityType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_entity_type" not in self._stubs: + self._stubs["update_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/UpdateEntityType", + request_serializer=gcdc_entity_type.UpdateEntityTypeRequest.serialize, + response_deserializer=gcdc_entity_type.EntityType.deserialize, + ) + return self._stubs["update_entity_type"] + + @property + def delete_entity_type( + self, + ) -> Callable[[entity_type.DeleteEntityTypeRequest], empty_pb2.Empty]: + r"""Return a callable for the delete entity type method over gRPC. + + Deletes the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeleteEntityTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entity_type" not in self._stubs: + self._stubs["delete_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/DeleteEntityType", + request_serializer=entity_type.DeleteEntityTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_entity_type"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("EntityTypesGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/grpc_asyncio.py new file mode 100644 index 000000000000..44bdad96f333 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/grpc_asyncio.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type + +from .base import DEFAULT_CLIENT_INFO, EntityTypesTransport +from .grpc import EntityTypesGrpcTransport + + +class EntityTypesGrpcAsyncIOTransport(EntityTypesTransport): + """gRPC AsyncIO backend transport for EntityTypes. + + Service for managing + [EntityTypes][google.cloud.dialogflow.cx.v3.EntityType]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def list_entity_types( + self, + ) -> Callable[ + [entity_type.ListEntityTypesRequest], + Awaitable[entity_type.ListEntityTypesResponse], + ]: + r"""Return a callable for the list entity types method over gRPC. + + Returns the list of all entity types in the specified + agent. + + Returns: + Callable[[~.ListEntityTypesRequest], + Awaitable[~.ListEntityTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_entity_types" not in self._stubs: + self._stubs["list_entity_types"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/ListEntityTypes", + request_serializer=entity_type.ListEntityTypesRequest.serialize, + response_deserializer=entity_type.ListEntityTypesResponse.deserialize, + ) + return self._stubs["list_entity_types"] + + @property + def get_entity_type( + self, + ) -> Callable[ + [entity_type.GetEntityTypeRequest], Awaitable[entity_type.EntityType] + ]: + r"""Return a callable for the get entity type method over gRPC. + + Retrieves the specified entity type. + + Returns: + Callable[[~.GetEntityTypeRequest], + Awaitable[~.EntityType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_entity_type" not in self._stubs: + self._stubs["get_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/GetEntityType", + request_serializer=entity_type.GetEntityTypeRequest.serialize, + response_deserializer=entity_type.EntityType.deserialize, + ) + return self._stubs["get_entity_type"] + + @property + def create_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.CreateEntityTypeRequest], + Awaitable[gcdc_entity_type.EntityType], + ]: + r"""Return a callable for the create entity type method over gRPC. + + Creates an entity type in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateEntityTypeRequest], + Awaitable[~.EntityType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_entity_type" not in self._stubs: + self._stubs["create_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/CreateEntityType", + request_serializer=gcdc_entity_type.CreateEntityTypeRequest.serialize, + response_deserializer=gcdc_entity_type.EntityType.deserialize, + ) + return self._stubs["create_entity_type"] + + @property + def update_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.UpdateEntityTypeRequest], + Awaitable[gcdc_entity_type.EntityType], + ]: + r"""Return a callable for the update entity type method over gRPC. + + Updates the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateEntityTypeRequest], + Awaitable[~.EntityType]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_entity_type" not in self._stubs: + self._stubs["update_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/UpdateEntityType", + request_serializer=gcdc_entity_type.UpdateEntityTypeRequest.serialize, + response_deserializer=gcdc_entity_type.EntityType.deserialize, + ) + return self._stubs["update_entity_type"] + + @property + def delete_entity_type( + self, + ) -> Callable[[entity_type.DeleteEntityTypeRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entity type method over gRPC. + + Deletes the specified entity type. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeleteEntityTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entity_type" not in self._stubs: + self._stubs["delete_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.EntityTypes/DeleteEntityType", + request_serializer=entity_type.DeleteEntityTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_entity_type"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("EntityTypesGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py new file mode 100644 index 000000000000..dd421a203763 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py @@ -0,0 +1,1381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EntityTypesTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EntityTypesRestInterceptor: + """Interceptor for EntityTypes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EntityTypesRestTransport. + + .. 
code-block:: python + class MyCustomEntityTypesInterceptor(EntityTypesRestInterceptor): + def pre_create_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entity_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entity_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entity_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entity_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entity_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EntityTypesRestTransport(interceptor=MyCustomEntityTypesInterceptor()) + client = EntityTypesClient(transport=transport) + + + """ + + def pre_create_entity_type( + self, + request: gcdc_entity_type.CreateEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_entity_type.CreateEntityTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. 
+ """ + return request, metadata + + def post_create_entity_type( + self, response: gcdc_entity_type.EntityType + ) -> gcdc_entity_type.EntityType: + """Post-rpc interceptor for create_entity_type + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_delete_entity_type( + self, + request: entity_type.DeleteEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[entity_type.DeleteEntityTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def pre_get_entity_type( + self, + request: entity_type.GetEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[entity_type.GetEntityTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def post_get_entity_type( + self, response: entity_type.EntityType + ) -> entity_type.EntityType: + """Post-rpc interceptor for get_entity_type + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_list_entity_types( + self, + request: entity_type.ListEntityTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[entity_type.ListEntityTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_entity_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. 
+ """ + return request, metadata + + def post_list_entity_types( + self, response: entity_type.ListEntityTypesResponse + ) -> entity_type.ListEntityTypesResponse: + """Post-rpc interceptor for list_entity_types + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_update_entity_type( + self, + request: gcdc_entity_type.UpdateEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_entity_type.UpdateEntityTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def post_update_entity_type( + self, response: gcdc_entity_type.EntityType + ) -> gcdc_entity_type.EntityType: + """Post-rpc interceptor for update_entity_type + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntityTypes server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the EntityTypes server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EntityTypesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EntityTypesRestInterceptor + + +class EntityTypesRestTransport(EntityTypesTransport): + """REST backend transport for EntityTypes. + + Service for managing + [EntityTypes][google.cloud.dialogflow.cx.v3.EntityType]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EntityTypesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EntityTypesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateEntityType(EntityTypesRestStub): + def __hash__(self): + return hash("CreateEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_entity_type.CreateEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_entity_type.EntityType: + r"""Call the create entity type method over HTTP. + + Args: + request (~.gcdc_entity_type.CreateEntityTypeRequest): + The request object. 
The request message for + [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.CreateEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_entity_type.EntityType: + Entities are extracted from user input and represent + parameters that are meaningful to your application. For + example, a date range, a proper name such as a + geographic location or landmark, and so on. Entities + represent actionable data for your application. + + When you define an entity, you can also include synonyms + that all map to that entity. For example, "soft drink", + "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the ``EntityType`` type. + + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to your + application. For example, you could define a + ``pizza.sauce`` entity for red or white pizza sauce, + a ``pizza.cheese`` entity for the different types of + cheese on a pizza, a ``pizza.topping`` entity for + different toppings, and so on. A custom entity is + represented by the ``EntityType`` type. + + - **User** - entities that are built for an individual + user such as favorites, preferences, playlists, and + so on. A user entity is represented by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + `Dialogflow + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/entityTypes", + "body": "entity_type", + }, + ] + request, metadata = self._interceptor.pre_create_entity_type( + request, metadata + ) + pb_request = gcdc_entity_type.CreateEntityTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_entity_type.EntityType() + pb_resp = gcdc_entity_type.EntityType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_entity_type(resp) + return resp + + class _DeleteEntityType(EntityTypesRestStub): + def __hash__(self): + return hash("DeleteEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_type.DeleteEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete entity type method over HTTP. + + Args: + request (~.entity_type.DeleteEntityTypeRequest): + The request object. The request message for + [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.DeleteEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/entityTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_entity_type( + request, metadata + ) + pb_request = entity_type.DeleteEntityTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetEntityType(EntityTypesRestStub): + def __hash__(self): + return hash("GetEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_type.GetEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_type.EntityType: + r"""Call the get entity type method over HTTP. + + Args: + request (~.entity_type.GetEntityTypeRequest): + The request object. 
The request message for + [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.GetEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_type.EntityType: + Entities are extracted from user input and represent + parameters that are meaningful to your application. For + example, a date range, a proper name such as a + geographic location or landmark, and so on. Entities + represent actionable data for your application. + + When you define an entity, you can also include synonyms + that all map to that entity. For example, "soft drink", + "soda", "pop", and so on. + + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the ``EntityType`` type. + + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to your + application. For example, you could define a + ``pizza.sauce`` entity for red or white pizza sauce, + a ``pizza.cheese`` entity for the different types of + cheese on a pizza, a ``pizza.topping`` entity for + different toppings, and so on. A custom entity is + represented by the ``EntityType`` type. + + - **User** - entities that are built for an individual + user such as favorites, preferences, playlists, and + so on. A user entity is represented by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + `Dialogflow + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/entityTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_entity_type(request, metadata) + pb_request = entity_type.GetEntityTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_type.EntityType() + pb_resp = entity_type.EntityType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_entity_type(resp) + return resp + + class _ListEntityTypes(EntityTypesRestStub): + def __hash__(self): + return hash("ListEntityTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_type.ListEntityTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_type.ListEntityTypesResponse: + r"""Call the list entity types method over HTTP. + + Args: + request (~.entity_type.ListEntityTypesRequest): + The request object. The request message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_type.ListEntityTypesResponse: + The response message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/entityTypes", + }, + ] + request, metadata = self._interceptor.pre_list_entity_types( + request, metadata + ) + pb_request = entity_type.ListEntityTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_type.ListEntityTypesResponse() + pb_resp = entity_type.ListEntityTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entity_types(resp) + return resp + + class _UpdateEntityType(EntityTypesRestStub): + def __hash__(self): + return hash("UpdateEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_entity_type.UpdateEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_entity_type.EntityType: + r"""Call the update entity type method over HTTP. + + Args: + request (~.gcdc_entity_type.UpdateEntityTypeRequest): + The request object. The request message for + [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.UpdateEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_entity_type.EntityType: + Entities are extracted from user input and represent + parameters that are meaningful to your application. For + example, a date range, a proper name such as a + geographic location or landmark, and so on. Entities + represent actionable data for your application. + + When you define an entity, you can also include synonyms + that all map to that entity. For example, "soft drink", + "soda", "pop", and so on. 
+ + There are three types of entities: + + - **System** - entities that are defined by the + Dialogflow API for common data types such as date, + time, currency, and so on. A system entity is + represented by the ``EntityType`` type. + + - **Custom** - entities that are defined by you that + represent actionable data that is meaningful to your + application. For example, you could define a + ``pizza.sauce`` entity for red or white pizza sauce, + a ``pizza.cheese`` entity for the different types of + cheese on a pizza, a ``pizza.topping`` entity for + different toppings, and so on. A custom entity is + represented by the ``EntityType`` type. + + - **User** - entities that are built for an individual + user such as favorites, preferences, playlists, and + so on. A user entity is represented by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the + `Dialogflow + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{entity_type.name=projects/*/locations/*/agents/*/entityTypes/*}", + "body": "entity_type", + }, + ] + request, metadata = self._interceptor.pre_update_entity_type( + request, metadata + ) + pb_request = gcdc_entity_type.UpdateEntityTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + 
headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_entity_type.EntityType() + pb_resp = gcdc_entity_type.EntityType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_entity_type(resp) + return resp + + @property + def create_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.CreateEntityTypeRequest], gcdc_entity_type.EntityType + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_entity_type( + self, + ) -> Callable[[entity_type.DeleteEntityTypeRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entity_type( + self, + ) -> Callable[[entity_type.GetEntityTypeRequest], entity_type.EntityType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entity_types( + self, + ) -> Callable[ + [entity_type.ListEntityTypesRequest], entity_type.ListEntityTypesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntityTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entity_type( + self, + ) -> Callable[ + [gcdc_entity_type.UpdateEntityTypeRequest], gcdc_entity_type.EntityType + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(EntityTypesRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(EntityTypesRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EntityTypesRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EntityTypesRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(EntityTypesRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EntityTypesRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/__init__.py new file mode 100644 index 000000000000..46bee45be957 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import EnvironmentsAsyncClient +from .client import EnvironmentsClient + +__all__ = ( + "EnvironmentsClient", + "EnvironmentsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py new file mode 100644 index 000000000000..e91e4628246a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/async_client.py @@ -0,0 +1,1611 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.environments import pagers +from google.cloud.dialogflowcx_v3.types import environment +from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment + +from .client import EnvironmentsClient +from .transports.base import DEFAULT_CLIENT_INFO, EnvironmentsTransport +from .transports.grpc_asyncio import EnvironmentsGrpcAsyncIOTransport + + +class EnvironmentsAsyncClient: + """Service for managing + [Environments][google.cloud.dialogflow.cx.v3.Environment]. 
+ """ + + _client: EnvironmentsClient + + DEFAULT_ENDPOINT = EnvironmentsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EnvironmentsClient.DEFAULT_MTLS_ENDPOINT + + continuous_test_result_path = staticmethod( + EnvironmentsClient.continuous_test_result_path + ) + parse_continuous_test_result_path = staticmethod( + EnvironmentsClient.parse_continuous_test_result_path + ) + environment_path = staticmethod(EnvironmentsClient.environment_path) + parse_environment_path = staticmethod(EnvironmentsClient.parse_environment_path) + service_path = staticmethod(EnvironmentsClient.service_path) + parse_service_path = staticmethod(EnvironmentsClient.parse_service_path) + test_case_path = staticmethod(EnvironmentsClient.test_case_path) + parse_test_case_path = staticmethod(EnvironmentsClient.parse_test_case_path) + test_case_result_path = staticmethod(EnvironmentsClient.test_case_result_path) + parse_test_case_result_path = staticmethod( + EnvironmentsClient.parse_test_case_result_path + ) + version_path = staticmethod(EnvironmentsClient.version_path) + parse_version_path = staticmethod(EnvironmentsClient.parse_version_path) + webhook_path = staticmethod(EnvironmentsClient.webhook_path) + parse_webhook_path = staticmethod(EnvironmentsClient.parse_webhook_path) + common_billing_account_path = staticmethod( + EnvironmentsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EnvironmentsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(EnvironmentsClient.common_folder_path) + parse_common_folder_path = staticmethod(EnvironmentsClient.parse_common_folder_path) + common_organization_path = staticmethod(EnvironmentsClient.common_organization_path) + parse_common_organization_path = staticmethod( + EnvironmentsClient.parse_common_organization_path + ) + common_project_path = staticmethod(EnvironmentsClient.common_project_path) + parse_common_project_path = staticmethod( + 
EnvironmentsClient.parse_common_project_path + ) + common_location_path = staticmethod(EnvironmentsClient.common_location_path) + parse_common_location_path = staticmethod( + EnvironmentsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EnvironmentsAsyncClient: The constructed client. + """ + return EnvironmentsClient.from_service_account_info.__func__(EnvironmentsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EnvironmentsAsyncClient: The constructed client. + """ + return EnvironmentsClient.from_service_account_file.__func__(EnvironmentsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return EnvironmentsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EnvironmentsTransport: + """Returns the transport used by the client instance. + + Returns: + EnvironmentsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, EnvironmentsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the environments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.EnvironmentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = EnvironmentsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_environments( + self, + request: Optional[Union[environment.ListEnvironmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsAsyncPager: + r"""Returns the list of all environments in the specified + [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_environments(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListEnvironmentsRequest, dict]]): + The request object. The request message for + [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments]. + parent (:class:`str`): + Required. The + [Agent][google.cloud.dialogflow.cx.v3.Agent] to list all + environments for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.environments.pagers.ListEnvironmentsAsyncPager: + The response message for + [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = environment.ListEnvironmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_environments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnvironmentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_environment( + self, + request: Optional[Union[environment.GetEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.Environment: + r"""Retrieves the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_environment(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetEnvironmentRequest, dict]]): + The request object. The request message for + [Environments.GetEnvironment][google.cloud.dialogflow.cx.v3.Environments.GetEnvironment]. + name (:class:`str`): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Environment: + Represents an environment for an + agent. You can create multiple versions + of your agent and publish them to + separate environments. When you edit an + agent, you are editing the draft agent. + At any point, you can save the draft + agent as an agent version, which is an + immutable snapshot of your agent. When + you save the draft agent, it is + published to the default environment. 
+ When you create agent versions, you can + publish them to custom environments. You + can create a variety of custom + environments for testing, development, + production, etc. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = environment.GetEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_environment( + self, + request: Optional[ + Union[gcdc_environment.CreateEnvironmentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + environment: Optional[gcdc_environment.Environment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an + [Environment][google.cloud.dialogflow.cx.v3.Environment] in the + specified [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [Environment][google.cloud.dialogflow.cx.v3.Environment] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_environment(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + environment = dialogflowcx_v3.Environment() + environment.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateEnvironmentRequest( + parent="parent_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateEnvironmentRequest, dict]]): + The request object. The request message for + [Environments.CreateEnvironment][google.cloud.dialogflow.cx.v3.Environments.CreateEnvironment]. + parent (:class:`str`): + Required. The + [Agent][google.cloud.dialogflow.cx.v3.Agent] to create + an + [Environment][google.cloud.dialogflow.cx.v3.Environment] + for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (:class:`google.cloud.dialogflowcx_v3.types.Environment`): + Required. The environment to create. 
+ This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.Environment` Represents an environment for an agent. You can create multiple versions + of your agent and publish them to separate + environments. When you edit an agent, you are editing + the draft agent. At any point, you can save the draft + agent as an agent version, which is an immutable + snapshot of your agent. When you save the draft + agent, it is published to the default environment. + When you create agent versions, you can publish them + to custom environments. You can create a variety of + custom environments for testing, development, + production, etc. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_environment.CreateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcdc_environment.Environment, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def update_environment( + self, + request: Optional[ + Union[gcdc_environment.UpdateEnvironmentRequest, dict] + ] = None, + *, + environment: Optional[gcdc_environment.Environment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [Environment][google.cloud.dialogflow.cx.v3.Environment] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_environment(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + environment = dialogflowcx_v3.Environment() + environment.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateEnvironmentRequest, dict]]): + The request object. The request message for + [Environments.UpdateEnvironment][google.cloud.dialogflow.cx.v3.Environments.UpdateEnvironment]. + environment (:class:`google.cloud.dialogflowcx_v3.types.Environment`): + Required. The environment to update. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The mask to control which + fields get updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.Environment` Represents an environment for an agent. You can create multiple versions + of your agent and publish them to separate + environments. When you edit an agent, you are editing + the draft agent. At any point, you can save the draft + agent as an agent version, which is an immutable + snapshot of your agent. When you save the draft + agent, it is published to the default environment. + When you create agent versions, you can publish them + to custom environments. You can create a variety of + custom environments for testing, development, + production, etc. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([environment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_environment.UpdateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment.name", request.environment.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcdc_environment.Environment, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def delete_environment( + self, + request: Optional[Union[environment.DeleteEnvironmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_environment(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + await client.delete_environment(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteEnvironmentRequest, dict]]): + The request object. The request message for + [Environments.DeleteEnvironment][google.cloud.dialogflow.cx.v3.Environments.DeleteEnvironment]. + name (:class:`str`): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to delete. Format: + ``projects//locations//agents//environments/``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = environment.DeleteEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def lookup_environment_history( + self, + request: Optional[ + Union[environment.LookupEnvironmentHistoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.LookupEnvironmentHistoryAsyncPager: + r"""Looks up the history of the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_lookup_environment_history(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.LookupEnvironmentHistoryRequest( + name="name_value", + ) + + # Make the request + page_result = client.lookup_environment_history(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryRequest, dict]]): + The request object. The request message for + [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory]. + name (:class:`str`): + Required. Resource name of the environment to look up + the history for. Format: + ``projects//locations//agents//environments/``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.environments.pagers.LookupEnvironmentHistoryAsyncPager: + The response message for + [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = environment.LookupEnvironmentHistoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lookup_environment_history, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.LookupEnvironmentHistoryAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_continuous_test( + self, + request: Optional[Union[environment.RunContinuousTestRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Kicks off a continuous test under the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunContinuousTestMetadata][google.cloud.dialogflow.cx.v3.RunContinuousTestMetadata] + - ``response``: + [RunContinuousTestResponse][google.cloud.dialogflow.cx.v3.RunContinuousTestResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_run_continuous_test(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.RunContinuousTestRequest( + environment="environment_value", + ) + + # Make the request + operation = client.run_continuous_test(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.RunContinuousTestRequest, dict]]): + The request object. The request message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.RunContinuousTestResponse` The response message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + + """ + # Create or coerce a protobuf request object. + request = environment.RunContinuousTestRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_continuous_test, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environment.RunContinuousTestResponse, + metadata_type=environment.RunContinuousTestMetadata, + ) + + # Done; return the response. + return response + + async def list_continuous_test_results( + self, + request: Optional[ + Union[environment.ListContinuousTestResultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListContinuousTestResultsAsyncPager: + r"""Fetches a list of continuous test results for a given + environment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_continuous_test_results(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListContinuousTestResultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_continuous_test_results(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsRequest, dict]]): + The request object. The request message for + [Environments.ListContinuousTestResults][google.cloud.dialogflow.cx.v3.Environments.ListContinuousTestResults]. + parent (:class:`str`): + Required. The environment to list results for. Format: + ``projects//locations//agents// environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.environments.pagers.ListContinuousTestResultsAsyncPager: + The response message for + [Environments.ListTestCaseResults][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = environment.ListContinuousTestResultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_continuous_test_results, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListContinuousTestResultsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deploy_flow( + self, + request: Optional[Union[environment.DeployFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deploys a flow to the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [DeployFlowMetadata][google.cloud.dialogflow.cx.v3.DeployFlowMetadata] + - ``response``: + [DeployFlowResponse][google.cloud.dialogflow.cx.v3.DeployFlowResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_deploy_flow(): + # Create a client + client = dialogflowcx_v3.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeployFlowRequest( + environment="environment_value", + flow_version="flow_version_value", + ) + + # Make the request + operation = client.deploy_flow(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeployFlowRequest, dict]]): + The request object. The request message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.DeployFlowResponse` The response message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + + """ + # Create or coerce a protobuf request object. + request = environment.DeployFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.deploy_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environment.DeployFlowResponse, + metadata_type=environment.DeployFlowMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "EnvironmentsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EnvironmentsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py new file mode 100644 index 000000000000..057f6ce6793c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py @@ -0,0 +1,1989 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.environments import pagers +from google.cloud.dialogflowcx_v3.types import environment +from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment + +from .transports.base import DEFAULT_CLIENT_INFO, EnvironmentsTransport +from .transports.grpc import EnvironmentsGrpcTransport +from .transports.grpc_asyncio import EnvironmentsGrpcAsyncIOTransport +from .transports.rest import EnvironmentsRestTransport + + +class EnvironmentsClientMeta(type): + """Metaclass for the Environments client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[EnvironmentsTransport]] + _transport_registry["grpc"] = EnvironmentsGrpcTransport + _transport_registry["grpc_asyncio"] = EnvironmentsGrpcAsyncIOTransport + _transport_registry["rest"] = EnvironmentsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EnvironmentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EnvironmentsClient(metaclass=EnvironmentsClientMeta): + """Service for managing + [Environments][google.cloud.dialogflow.cx.v3.Environment]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EnvironmentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EnvironmentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EnvironmentsTransport: + """Returns the transport used by the client instance. + + Returns: + EnvironmentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def continuous_test_result_path( + project: str, + location: str, + agent: str, + environment: str, + continuous_test_result: str, + ) -> str: + """Returns a fully-qualified continuous_test_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/continuousTestResults/{continuous_test_result}".format( + project=project, + location=location, + agent=agent, + environment=environment, + continuous_test_result=continuous_test_result, + ) + + @staticmethod + def parse_continuous_test_result_path(path: str) -> Dict[str, str]: + """Parses a continuous_test_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)/continuousTestResults/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def environment_path( + project: str, + location: str, + agent: str, + environment: str, + ) -> str: + """Returns a fully-qualified environment string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}".format( + project=project, + location=location, + agent=agent, + environment=environment, + ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str, str]: + """Parses a environment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path( + project: str, + location: str, + namespace: str, + service: str, + ) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format( + project=project, + location=location, + namespace=namespace, + service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component 
segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/namespaces/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def test_case_path( + project: str, + location: str, + agent: str, + test_case: str, + ) -> str: + """Returns a fully-qualified test_case string.""" + return "projects/{project}/locations/{location}/agents/{agent}/testCases/{test_case}".format( + project=project, + location=location, + agent=agent, + test_case=test_case, + ) + + @staticmethod + def parse_test_case_path(path: str) -> Dict[str, str]: + """Parses a test_case path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/testCases/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def test_case_result_path( + project: str, + location: str, + agent: str, + test_case: str, + result: str, + ) -> str: + """Returns a fully-qualified test_case_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/testCases/{test_case}/results/{result}".format( + project=project, + location=location, + agent=agent, + test_case=test_case, + result=result, + ) + + @staticmethod + def parse_test_case_result_path(path: str) -> Dict[str, str]: + """Parses a test_case_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/testCases/(?P.+?)/results/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def version_path( + project: str, + location: str, + agent: str, + flow: str, + version: str, + ) -> str: + """Returns a fully-qualified version string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/versions/{version}".format( + project=project, + location=location, + agent=agent, + flow=flow, + version=version, + ) + + @staticmethod + def parse_version_path(path: str) -> Dict[str, str]: + """Parses a version path into its component segments.""" 
+ m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/versions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + 
m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, EnvironmentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the environments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, EnvironmentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, EnvironmentsTransport): + # transport is a EnvironmentsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_environments( + self, + request: Optional[Union[environment.ListEnvironmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsPager: + r"""Returns the list of all environments in the specified + [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import dialogflowcx_v3

            def sample_list_environments():
                # Create a client
                client = dialogflowcx_v3.EnvironmentsClient()

                # Initialize request argument(s)
                request = dialogflowcx_v3.ListEnvironmentsRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_environments(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.dialogflowcx_v3.types.ListEnvironmentsRequest, dict]):
                The request object. The request message for
                [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments].
            parent (str):
                Required. The
                [Agent][google.cloud.dialogflow.cx.v3.Agent] to list all
                environments for. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.dialogflowcx_v3.services.environments.pagers.ListEnvironmentsPager:
                The response message for
                [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a environment.ListEnvironmentsRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, environment.ListEnvironmentsRequest):
            request = environment.ListEnvironmentsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_environments]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListEnvironmentsPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def get_environment(
        self,
        request: Optional[Union[environment.GetEnvironmentRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> environment.Environment:
        r"""Retrieves the specified
        [Environment][google.cloud.dialogflow.cx.v3.Environment].

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import dialogflowcx_v3

            def sample_get_environment():
                # Create a client
                client = dialogflowcx_v3.EnvironmentsClient()

                # Initialize request argument(s)
                request = dialogflowcx_v3.GetEnvironmentRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_environment(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.dialogflowcx_v3.types.GetEnvironmentRequest, dict]):
                The request object. The request message for
                [Environments.GetEnvironment][google.cloud.dialogflow.cx.v3.Environments.GetEnvironment].
            name (str):
                Required. The name of the
                [Environment][google.cloud.dialogflow.cx.v3.Environment].
                Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.dialogflowcx_v3.types.Environment:
                Represents an environment for an
                agent. You can create multiple versions
                of your agent and publish them to
                separate environments. When you edit an
                agent, you are editing the draft agent.
                At any point, you can save the draft
                agent as an agent version, which is an
                immutable snapshot of your agent. When
                you save the draft agent, it is
                published to the default environment.
                When you create agent versions, you can
                publish them to custom environments. You
                can create a variety of custom
                environments for testing, development,
                production, etc.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a environment.GetEnvironmentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, environment.GetEnvironmentRequest):
            request = environment.GetEnvironmentRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_environment]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def create_environment(
        self,
        request: Optional[
            Union[gcdc_environment.CreateEnvironmentRequest, dict]
        ] = None,
        *,
        parent: Optional[str] = None,
        environment: Optional[gcdc_environment.Environment] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Creates an
        [Environment][google.cloud.dialogflow.cx.v3.Environment] in the
        specified [Agent][google.cloud.dialogflow.cx.v3.Agent].

        This method is a `long-running
        operation <https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation>`__.
        The returned ``Operation`` type has the following
        method-specific fields:

        -  ``metadata``: An empty `Struct
           message <https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct>`__
        -  ``response``:
           [Environment][google.cloud.dialogflow.cx.v3.Environment]

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import dialogflowcx_v3

            def sample_create_environment():
                # Create a client
                client = dialogflowcx_v3.EnvironmentsClient()

                # Initialize request argument(s)
                environment = dialogflowcx_v3.Environment()
                environment.display_name = "display_name_value"

                request = dialogflowcx_v3.CreateEnvironmentRequest(
                    parent="parent_value",
                    environment=environment,
                )

                # Make the request
                operation = client.create_environment(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.dialogflowcx_v3.types.CreateEnvironmentRequest, dict]):
                The request object. The request message for
                [Environments.CreateEnvironment][google.cloud.dialogflow.cx.v3.Environments.CreateEnvironment].
            parent (str):
                Required. The
                [Agent][google.cloud.dialogflow.cx.v3.Agent] to create
                an
                [Environment][google.cloud.dialogflow.cx.v3.Environment]
                for. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>``.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            environment (google.cloud.dialogflowcx_v3.types.Environment):
                Required. The environment to create.
                This corresponds to the ``environment`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.Environment` Represents an environment for an agent. You can create multiple versions
                of your agent and publish them to separate
                environments. When you edit an agent, you are editing
                the draft agent. At any point, you can save the draft
                agent as an agent version, which is an immutable
                snapshot of your agent. When you save the draft
                agent, it is published to the default environment.
                When you create agent versions, you can publish them
                to custom environments. You can create a variety of
                custom environments for testing, development,
                production, etc.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, environment])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a gcdc_environment.CreateEnvironmentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, gcdc_environment.CreateEnvironmentRequest):
            request = gcdc_environment.CreateEnvironmentRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
            if environment is not None:
                request.environment = environment

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.create_environment]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            gcdc_environment.Environment,
            metadata_type=struct_pb2.Struct,
        )

        # Done; return the response.
        return response

    def update_environment(
        self,
        request: Optional[
            Union[gcdc_environment.UpdateEnvironmentRequest, dict]
        ] = None,
        *,
        environment: Optional[gcdc_environment.Environment] = None,
        update_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        r"""Updates the specified
        [Environment][google.cloud.dialogflow.cx.v3.Environment].

        This method is a `long-running
        operation <https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation>`__.
        The returned ``Operation`` type has the following
        method-specific fields:

        -  ``metadata``: An empty `Struct
           message <https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct>`__
        -  ``response``:
           [Environment][google.cloud.dialogflow.cx.v3.Environment]

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import dialogflowcx_v3

            def sample_update_environment():
                # Create a client
                client = dialogflowcx_v3.EnvironmentsClient()

                # Initialize request argument(s)
                environment = dialogflowcx_v3.Environment()
                environment.display_name = "display_name_value"

                request = dialogflowcx_v3.UpdateEnvironmentRequest(
                    environment=environment,
                )

                # Make the request
                operation = client.update_environment(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.dialogflowcx_v3.types.UpdateEnvironmentRequest, dict]):
                The request object. The request message for
                [Environments.UpdateEnvironment][google.cloud.dialogflow.cx.v3.Environments.UpdateEnvironment].
            environment (google.cloud.dialogflowcx_v3.types.Environment):
                Required. The environment to update.
                This corresponds to the ``environment`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Required. The mask to control which
                fields get updated.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.Environment` Represents an environment for an agent. You can create multiple versions
                of your agent and publish them to separate
                environments. When you edit an agent, you are editing
                the draft agent. At any point, you can save the draft
                agent as an agent version, which is an immutable
                snapshot of your agent. When you save the draft
                agent, it is published to the default environment.
                When you create agent versions, you can publish them
                to custom environments. You can create a variety of
                custom environments for testing, development,
                production, etc.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([environment, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a gcdc_environment.UpdateEnvironmentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, gcdc_environment.UpdateEnvironmentRequest):
            request = gcdc_environment.UpdateEnvironmentRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if environment is not None:
                request.environment = environment
            if update_mask is not None:
                request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_environment]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("environment.name", request.environment.name),)
            ),
        )

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            gcdc_environment.Environment,
            metadata_type=struct_pb2.Struct,
        )

        # Done; return the response.
        return response

    def delete_environment(
        self,
        request: Optional[Union[environment.DeleteEnvironmentRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Deletes the specified
        [Environment][google.cloud.dialogflow.cx.v3.Environment].

        ..
 code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import dialogflowcx_v3

            def sample_delete_environment():
                # Create a client
                client = dialogflowcx_v3.EnvironmentsClient()

                # Initialize request argument(s)
                request = dialogflowcx_v3.DeleteEnvironmentRequest(
                    name="name_value",
                )

                # Make the request
                client.delete_environment(request=request)

        Args:
            request (Union[google.cloud.dialogflowcx_v3.types.DeleteEnvironmentRequest, dict]):
                The request object. The request message for
                [Environments.DeleteEnvironment][google.cloud.dialogflow.cx.v3.Environments.DeleteEnvironment].
            name (str):
                Required. The name of the
                [Environment][google.cloud.dialogflow.cx.v3.Environment]
                to delete. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a environment.DeleteEnvironmentRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(request, environment.DeleteEnvironmentRequest):
            request = environment.DeleteEnvironmentRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete_environment]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def lookup_environment_history(
        self,
        request: Optional[
            Union[environment.LookupEnvironmentHistoryRequest, dict]
        ] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.LookupEnvironmentHistoryPager:
        r"""Looks up the history of the specified
        [Environment][google.cloud.dialogflow.cx.v3.Environment].

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import dialogflowcx_v3

            def sample_lookup_environment_history():
                # Create a client
                client = dialogflowcx_v3.EnvironmentsClient()

                # Initialize request argument(s)
                request = dialogflowcx_v3.LookupEnvironmentHistoryRequest(
                    name="name_value",
                )

                # Make the request
                page_result = client.lookup_environment_history(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryRequest, dict]):
                The request object. The request message for
                [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory].
            name (str):
                Required. Resource name of the environment to look up
                the history for. Format:
                ``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/environments/<Environment ID>``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.dialogflowcx_v3.services.environments.pagers.LookupEnvironmentHistoryPager:
                The response message for
                [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a environment.LookupEnvironmentHistoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environment.LookupEnvironmentHistoryRequest): + request = environment.LookupEnvironmentHistoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.lookup_environment_history + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.LookupEnvironmentHistoryPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_continuous_test( + self, + request: Optional[Union[environment.RunContinuousTestRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Kicks off a continuous test under the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunContinuousTestMetadata][google.cloud.dialogflow.cx.v3.RunContinuousTestMetadata] + - ``response``: + [RunContinuousTestResponse][google.cloud.dialogflow.cx.v3.RunContinuousTestResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_run_continuous_test(): + # Create a client + client = dialogflowcx_v3.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.RunContinuousTestRequest( + environment="environment_value", + ) + + # Make the request + operation = client.run_continuous_test(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.RunContinuousTestRequest, dict]): + The request object. The request message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.RunContinuousTestResponse` The response message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environment.RunContinuousTestRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environment.RunContinuousTestRequest): + request = environment.RunContinuousTestRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_continuous_test] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environment.RunContinuousTestResponse, + metadata_type=environment.RunContinuousTestMetadata, + ) + + # Done; return the response. + return response + + def list_continuous_test_results( + self, + request: Optional[ + Union[environment.ListContinuousTestResultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListContinuousTestResultsPager: + r"""Fetches a list of continuous test results for a given + environment. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_continuous_test_results(): + # Create a client + client = dialogflowcx_v3.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListContinuousTestResultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_continuous_test_results(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsRequest, dict]): + The request object. The request message for + [Environments.ListContinuousTestResults][google.cloud.dialogflow.cx.v3.Environments.ListContinuousTestResults]. + parent (str): + Required. The environment to list results for. Format: + ``projects//locations//agents// environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.environments.pagers.ListContinuousTestResultsPager: + The response message for + [Environments.ListTestCaseResults][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a environment.ListContinuousTestResultsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environment.ListContinuousTestResultsRequest): + request = environment.ListContinuousTestResultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_continuous_test_results + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListContinuousTestResultsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def deploy_flow( + self, + request: Optional[Union[environment.DeployFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deploys a flow to the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [DeployFlowMetadata][google.cloud.dialogflow.cx.v3.DeployFlowMetadata] + - ``response``: + [DeployFlowResponse][google.cloud.dialogflow.cx.v3.DeployFlowResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_deploy_flow(): + # Create a client + client = dialogflowcx_v3.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeployFlowRequest( + environment="environment_value", + flow_version="flow_version_value", + ) + + # Make the request + operation = client.deploy_flow(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeployFlowRequest, dict]): + The request object. The request message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.DeployFlowResponse` The response message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environment.DeployFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environment.DeployFlowRequest): + request = environment.DeployFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deploy_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environment.DeployFlowResponse, + metadata_type=environment.DeployFlowMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EnvironmentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EnvironmentsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/pagers.py new file mode 100644 index 000000000000..ab41947f0827 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/pagers.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import environment + + +class ListEnvironmentsPager: + """A pager for iterating through ``list_environments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListEnvironmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEnvironments`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListEnvironmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., environment.ListEnvironmentsResponse], + request: environment.ListEnvironmentsRequest, + response: environment.ListEnvironmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListEnvironmentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListEnvironmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = environment.ListEnvironmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[environment.ListEnvironmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[environment.Environment]: + for page in self.pages: + yield from page.environments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEnvironmentsAsyncPager: + """A pager for iterating through ``list_environments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListEnvironmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEnvironments`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListEnvironmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[environment.ListEnvironmentsResponse]], + request: environment.ListEnvironmentsRequest, + response: environment.ListEnvironmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dialogflowcx_v3.types.ListEnvironmentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListEnvironmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = environment.ListEnvironmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[environment.ListEnvironmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[environment.Environment]: + async def async_generator(): + async for page in self.pages: + for response in page.environments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class LookupEnvironmentHistoryPager: + """A pager for iterating through ``lookup_environment_history`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryResponse` object, and + provides an ``__iter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``LookupEnvironmentHistory`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., environment.LookupEnvironmentHistoryResponse], + request: environment.LookupEnvironmentHistoryRequest, + response: environment.LookupEnvironmentHistoryResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = environment.LookupEnvironmentHistoryRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[environment.LookupEnvironmentHistoryResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[environment.Environment]: + for page in self.pages: + yield from page.environments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class LookupEnvironmentHistoryAsyncPager: + """A pager for iterating through ``lookup_environment_history`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``environments`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``LookupEnvironmentHistory`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[environment.LookupEnvironmentHistoryResponse]], + request: environment.LookupEnvironmentHistoryRequest, + response: environment.LookupEnvironmentHistoryResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.LookupEnvironmentHistoryResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = environment.LookupEnvironmentHistoryRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[environment.LookupEnvironmentHistoryResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[environment.Environment]: + async def async_generator(): + async for page in self.pages: + for response in page.environments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListContinuousTestResultsPager: + """A pager for iterating through ``list_continuous_test_results`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``continuous_test_results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListContinuousTestResults`` requests and continue to iterate + through the ``continuous_test_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., environment.ListContinuousTestResultsResponse], + request: environment.ListContinuousTestResultsRequest, + response: environment.ListContinuousTestResultsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = environment.ListContinuousTestResultsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[environment.ListContinuousTestResultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[environment.ContinuousTestResult]: + for page in self.pages: + yield from page.continuous_test_results + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListContinuousTestResultsAsyncPager: + """A pager for iterating through ``list_continuous_test_results`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``continuous_test_results`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListContinuousTestResults`` requests and continue to iterate + through the ``continuous_test_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[environment.ListContinuousTestResultsResponse]], + request: environment.ListContinuousTestResultsRequest, + response: environment.ListContinuousTestResultsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListContinuousTestResultsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = environment.ListContinuousTestResultsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[environment.ListContinuousTestResultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[environment.ContinuousTestResult]: + async def async_generator(): + async for page in self.pages: + for response in page.continuous_test_results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/__init__.py new file mode 100644 index 000000000000..befb31951776 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import EnvironmentsTransport +from .grpc import EnvironmentsGrpcTransport +from .grpc_asyncio import EnvironmentsGrpcAsyncIOTransport +from .rest import EnvironmentsRestInterceptor, EnvironmentsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[EnvironmentsTransport]] +_transport_registry["grpc"] = EnvironmentsGrpcTransport +_transport_registry["grpc_asyncio"] = EnvironmentsGrpcAsyncIOTransport +_transport_registry["rest"] = EnvironmentsRestTransport + +__all__ = ( + "EnvironmentsTransport", + "EnvironmentsGrpcTransport", + "EnvironmentsGrpcAsyncIOTransport", + "EnvironmentsRestTransport", + "EnvironmentsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/base.py new file mode 100644 index 000000000000..2954e4b53ef8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/base.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import environment +from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class EnvironmentsTransport(abc.ABC): + """Abstract transport class for Environments.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_environments: gapic_v1.method.wrap_method( + self.list_environments, + default_timeout=None, + client_info=client_info, + ), + self.get_environment: gapic_v1.method.wrap_method( + self.get_environment, + default_timeout=None, + client_info=client_info, + ), + self.create_environment: gapic_v1.method.wrap_method( + self.create_environment, + default_timeout=None, + client_info=client_info, + ), + self.update_environment: gapic_v1.method.wrap_method( + self.update_environment, + default_timeout=None, + client_info=client_info, + ), + self.delete_environment: gapic_v1.method.wrap_method( + self.delete_environment, + default_timeout=None, + client_info=client_info, + ), + self.lookup_environment_history: gapic_v1.method.wrap_method( + self.lookup_environment_history, + default_timeout=None, + client_info=client_info, + ), + self.run_continuous_test: gapic_v1.method.wrap_method( + self.run_continuous_test, + default_timeout=None, + client_info=client_info, + ), + self.list_continuous_test_results: gapic_v1.method.wrap_method( + self.list_continuous_test_results, + default_timeout=None, + client_info=client_info, + ), + self.deploy_flow: gapic_v1.method.wrap_method( + self.deploy_flow, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_environments( + self, + ) -> Callable[ + [environment.ListEnvironmentsRequest], + Union[ + environment.ListEnvironmentsResponse, + Awaitable[environment.ListEnvironmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_environment( + self, + ) -> Callable[ + [environment.GetEnvironmentRequest], + Union[environment.Environment, Awaitable[environment.Environment]], + ]: + raise NotImplementedError() + + @property + def create_environment( + self, + ) -> Callable[ + [gcdc_environment.CreateEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_environment( + self, + ) -> Callable[ + [gcdc_environment.UpdateEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_environment( + self, + ) -> Callable[ + [environment.DeleteEnvironmentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def lookup_environment_history( + self, + ) -> Callable[ + [environment.LookupEnvironmentHistoryRequest], + Union[ + environment.LookupEnvironmentHistoryResponse, + Awaitable[environment.LookupEnvironmentHistoryResponse], + ], + ]: + raise NotImplementedError() + + @property + def run_continuous_test( + self, + ) -> Callable[ + [environment.RunContinuousTestRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_continuous_test_results( + self, + ) -> Callable[ + 
[environment.ListContinuousTestResultsRequest], + Union[ + environment.ListContinuousTestResultsResponse, + Awaitable[environment.ListContinuousTestResultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def deploy_flow( + self, + ) -> Callable[ + [environment.DeployFlowRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("EnvironmentsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/grpc.py new file mode 100644 index 000000000000..f666130f84b7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/grpc.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import environment +from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment + +from .base import DEFAULT_CLIENT_INFO, EnvironmentsTransport + + +class EnvironmentsGrpcTransport(EnvironmentsTransport): + """gRPC backend transport for Environments. + + Service for managing + [Environments][google.cloud.dialogflow.cx.v3.Environment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_environments( + self, + ) -> Callable[ + [environment.ListEnvironmentsRequest], environment.ListEnvironmentsResponse + ]: + r"""Return a callable for the list environments method over gRPC. + + Returns the list of all environments in the specified + [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + Returns: + Callable[[~.ListEnvironmentsRequest], + ~.ListEnvironmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_environments" not in self._stubs: + self._stubs["list_environments"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/ListEnvironments", + request_serializer=environment.ListEnvironmentsRequest.serialize, + response_deserializer=environment.ListEnvironmentsResponse.deserialize, + ) + return self._stubs["list_environments"] + + @property + def get_environment( + self, + ) -> Callable[[environment.GetEnvironmentRequest], environment.Environment]: + r"""Return a callable for the get environment method over gRPC. + + Retrieves the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.GetEnvironmentRequest], + ~.Environment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_environment" not in self._stubs: + self._stubs["get_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/GetEnvironment", + request_serializer=environment.GetEnvironmentRequest.serialize, + response_deserializer=environment.Environment.deserialize, + ) + return self._stubs["get_environment"] + + @property + def create_environment( + self, + ) -> Callable[ + [gcdc_environment.CreateEnvironmentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create environment method over gRPC. + + Creates an + [Environment][google.cloud.dialogflow.cx.v3.Environment] in the + specified [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [Environment][google.cloud.dialogflow.cx.v3.Environment] + + Returns: + Callable[[~.CreateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_environment" not in self._stubs: + self._stubs["create_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/CreateEnvironment", + request_serializer=gcdc_environment.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_environment"] + + @property + def update_environment( + self, + ) -> Callable[ + [gcdc_environment.UpdateEnvironmentRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update environment method over gRPC. + + Updates the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [Environment][google.cloud.dialogflow.cx.v3.Environment] + + Returns: + Callable[[~.UpdateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_environment" not in self._stubs: + self._stubs["update_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/UpdateEnvironment", + request_serializer=gcdc_environment.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_environment"] + + @property + def delete_environment( + self, + ) -> Callable[[environment.DeleteEnvironmentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete environment method over gRPC. + + Deletes the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.DeleteEnvironmentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_environment" not in self._stubs: + self._stubs["delete_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/DeleteEnvironment", + request_serializer=environment.DeleteEnvironmentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_environment"] + + @property + def lookup_environment_history( + self, + ) -> Callable[ + [environment.LookupEnvironmentHistoryRequest], + environment.LookupEnvironmentHistoryResponse, + ]: + r"""Return a callable for the lookup environment history method over gRPC. + + Looks up the history of the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.LookupEnvironmentHistoryRequest], + ~.LookupEnvironmentHistoryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lookup_environment_history" not in self._stubs: + self._stubs["lookup_environment_history"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/LookupEnvironmentHistory", + request_serializer=environment.LookupEnvironmentHistoryRequest.serialize, + response_deserializer=environment.LookupEnvironmentHistoryResponse.deserialize, + ) + return self._stubs["lookup_environment_history"] + + @property + def run_continuous_test( + self, + ) -> Callable[[environment.RunContinuousTestRequest], operations_pb2.Operation]: + r"""Return a callable for the run continuous test method over gRPC. + + Kicks off a continuous test under the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunContinuousTestMetadata][google.cloud.dialogflow.cx.v3.RunContinuousTestMetadata] + - ``response``: + [RunContinuousTestResponse][google.cloud.dialogflow.cx.v3.RunContinuousTestResponse] + + Returns: + Callable[[~.RunContinuousTestRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "run_continuous_test" not in self._stubs: + self._stubs["run_continuous_test"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/RunContinuousTest", + request_serializer=environment.RunContinuousTestRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["run_continuous_test"] + + @property + def list_continuous_test_results( + self, + ) -> Callable[ + [environment.ListContinuousTestResultsRequest], + environment.ListContinuousTestResultsResponse, + ]: + r"""Return a callable for the list continuous test results method over gRPC. + + Fetches a list of continuous test results for a given + environment. + + Returns: + Callable[[~.ListContinuousTestResultsRequest], + ~.ListContinuousTestResultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_continuous_test_results" not in self._stubs: + self._stubs["list_continuous_test_results"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/ListContinuousTestResults", + request_serializer=environment.ListContinuousTestResultsRequest.serialize, + response_deserializer=environment.ListContinuousTestResultsResponse.deserialize, + ) + return self._stubs["list_continuous_test_results"] + + @property + def deploy_flow( + self, + ) -> Callable[[environment.DeployFlowRequest], operations_pb2.Operation]: + r"""Return a callable for the deploy flow method over gRPC. + + Deploys a flow to the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [DeployFlowMetadata][google.cloud.dialogflow.cx.v3.DeployFlowMetadata] + - ``response``: + [DeployFlowResponse][google.cloud.dialogflow.cx.v3.DeployFlowResponse] + + Returns: + Callable[[~.DeployFlowRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "deploy_flow" not in self._stubs: + self._stubs["deploy_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/DeployFlow", + request_serializer=environment.DeployFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["deploy_flow"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("EnvironmentsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/grpc_asyncio.py new file mode 100644 index 000000000000..789630ad0548 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/grpc_asyncio.py @@ -0,0 +1,650 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import environment +from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment + +from .base import DEFAULT_CLIENT_INFO, EnvironmentsTransport +from .grpc import EnvironmentsGrpcTransport + + +class EnvironmentsGrpcAsyncIOTransport(EnvironmentsTransport): + """gRPC AsyncIO backend transport for Environments. + + Service for managing + [Environments][google.cloud.dialogflow.cx.v3.Environment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_environments( + self, + ) -> Callable[ + [environment.ListEnvironmentsRequest], + Awaitable[environment.ListEnvironmentsResponse], + ]: + r"""Return a callable for the list environments method over gRPC. + + Returns the list of all environments in the specified + [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + Returns: + Callable[[~.ListEnvironmentsRequest], + Awaitable[~.ListEnvironmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_environments" not in self._stubs: + self._stubs["list_environments"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/ListEnvironments", + request_serializer=environment.ListEnvironmentsRequest.serialize, + response_deserializer=environment.ListEnvironmentsResponse.deserialize, + ) + return self._stubs["list_environments"] + + @property + def get_environment( + self, + ) -> Callable[ + [environment.GetEnvironmentRequest], Awaitable[environment.Environment] + ]: + r"""Return a callable for the get environment method over gRPC. + + Retrieves the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.GetEnvironmentRequest], + Awaitable[~.Environment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_environment" not in self._stubs: + self._stubs["get_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/GetEnvironment", + request_serializer=environment.GetEnvironmentRequest.serialize, + response_deserializer=environment.Environment.deserialize, + ) + return self._stubs["get_environment"] + + @property + def create_environment( + self, + ) -> Callable[ + [gcdc_environment.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create environment method over gRPC. + + Creates an + [Environment][google.cloud.dialogflow.cx.v3.Environment] in the + specified [Agent][google.cloud.dialogflow.cx.v3.Agent]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [Environment][google.cloud.dialogflow.cx.v3.Environment] + + Returns: + Callable[[~.CreateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_environment" not in self._stubs: + self._stubs["create_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/CreateEnvironment", + request_serializer=gcdc_environment.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_environment"] + + @property + def update_environment( + self, + ) -> Callable[ + [gcdc_environment.UpdateEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update environment method over gRPC. + + Updates the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [Environment][google.cloud.dialogflow.cx.v3.Environment] + + Returns: + Callable[[~.UpdateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_environment" not in self._stubs: + self._stubs["update_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/UpdateEnvironment", + request_serializer=gcdc_environment.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_environment"] + + @property + def delete_environment( + self, + ) -> Callable[[environment.DeleteEnvironmentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete environment method over gRPC. + + Deletes the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.DeleteEnvironmentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_environment" not in self._stubs: + self._stubs["delete_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/DeleteEnvironment", + request_serializer=environment.DeleteEnvironmentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_environment"] + + @property + def lookup_environment_history( + self, + ) -> Callable[ + [environment.LookupEnvironmentHistoryRequest], + Awaitable[environment.LookupEnvironmentHistoryResponse], + ]: + r"""Return a callable for the lookup environment history method over gRPC. + + Looks up the history of the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.LookupEnvironmentHistoryRequest], + Awaitable[~.LookupEnvironmentHistoryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lookup_environment_history" not in self._stubs: + self._stubs["lookup_environment_history"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/LookupEnvironmentHistory", + request_serializer=environment.LookupEnvironmentHistoryRequest.serialize, + response_deserializer=environment.LookupEnvironmentHistoryResponse.deserialize, + ) + return self._stubs["lookup_environment_history"] + + @property + def run_continuous_test( + self, + ) -> Callable[ + [environment.RunContinuousTestRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the run continuous test method over gRPC. + + Kicks off a continuous test under the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunContinuousTestMetadata][google.cloud.dialogflow.cx.v3.RunContinuousTestMetadata] + - ``response``: + [RunContinuousTestResponse][google.cloud.dialogflow.cx.v3.RunContinuousTestResponse] + + Returns: + Callable[[~.RunContinuousTestRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "run_continuous_test" not in self._stubs: + self._stubs["run_continuous_test"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/RunContinuousTest", + request_serializer=environment.RunContinuousTestRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["run_continuous_test"] + + @property + def list_continuous_test_results( + self, + ) -> Callable[ + [environment.ListContinuousTestResultsRequest], + Awaitable[environment.ListContinuousTestResultsResponse], + ]: + r"""Return a callable for the list continuous test results method over gRPC. + + Fetches a list of continuous test results for a given + environment. + + Returns: + Callable[[~.ListContinuousTestResultsRequest], + Awaitable[~.ListContinuousTestResultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_continuous_test_results" not in self._stubs: + self._stubs["list_continuous_test_results"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/ListContinuousTestResults", + request_serializer=environment.ListContinuousTestResultsRequest.serialize, + response_deserializer=environment.ListContinuousTestResultsResponse.deserialize, + ) + return self._stubs["list_continuous_test_results"] + + @property + def deploy_flow( + self, + ) -> Callable[[environment.DeployFlowRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the deploy flow method over gRPC. + + Deploys a flow to the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [DeployFlowMetadata][google.cloud.dialogflow.cx.v3.DeployFlowMetadata] + - ``response``: + [DeployFlowResponse][google.cloud.dialogflow.cx.v3.DeployFlowResponse] + + Returns: + Callable[[~.DeployFlowRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "deploy_flow" not in self._stubs: + self._stubs["deploy_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Environments/DeployFlow", + request_serializer=environment.DeployFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["deploy_flow"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("EnvironmentsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py new file mode 100644 index 000000000000..8a896a82d830 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py @@ -0,0 +1,1902 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import environment +from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EnvironmentsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EnvironmentsRestInterceptor: + """Interceptor for 
Environments. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EnvironmentsRestTransport. + + .. code-block:: python + class MyCustomEnvironmentsInterceptor(EnvironmentsRestInterceptor): + def pre_create_environment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_environment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_environment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_deploy_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deploy_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_environment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_environment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_continuous_test_results(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_continuous_test_results(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_environments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_environments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lookup_environment_history(self, request, metadata): + logging.log(f"Received request: {request}") + return request, 
metadata + + def post_lookup_environment_history(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_continuous_test(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_continuous_test(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_environment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_environment(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EnvironmentsRestTransport(interceptor=MyCustomEnvironmentsInterceptor()) + client = EnvironmentsClient(transport=transport) + + + """ + + def pre_create_environment( + self, + request: gcdc_environment.CreateEnvironmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_environment.CreateEnvironmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_create_environment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_environment + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_delete_environment( + self, + request: environment.DeleteEnvironmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.DeleteEnvironmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def pre_deploy_flow( + self, + request: environment.DeployFlowRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.DeployFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for deploy_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_deploy_flow( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for deploy_flow + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_get_environment( + self, + request: environment.GetEnvironmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.GetEnvironmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_get_environment( + self, response: environment.Environment + ) -> environment.Environment: + """Post-rpc interceptor for get_environment + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_list_continuous_test_results( + self, + request: environment.ListContinuousTestResultsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.ListContinuousTestResultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_continuous_test_results + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def post_list_continuous_test_results( + self, response: environment.ListContinuousTestResultsResponse + ) -> environment.ListContinuousTestResultsResponse: + """Post-rpc interceptor for list_continuous_test_results + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_list_environments( + self, + request: environment.ListEnvironmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.ListEnvironmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_environments + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_list_environments( + self, response: environment.ListEnvironmentsResponse + ) -> environment.ListEnvironmentsResponse: + """Post-rpc interceptor for list_environments + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_lookup_environment_history( + self, + request: environment.LookupEnvironmentHistoryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.LookupEnvironmentHistoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lookup_environment_history + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_lookup_environment_history( + self, response: environment.LookupEnvironmentHistoryResponse + ) -> environment.LookupEnvironmentHistoryResponse: + """Post-rpc interceptor for lookup_environment_history + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. 
+ """ + return response + + def pre_run_continuous_test( + self, + request: environment.RunContinuousTestRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environment.RunContinuousTestRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_continuous_test + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_run_continuous_test( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for run_continuous_test + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_update_environment( + self, + request: gcdc_environment.UpdateEnvironmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_environment.UpdateEnvironmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_environment + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_update_environment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_environment + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EnvironmentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EnvironmentsRestInterceptor + + +class EnvironmentsRestTransport(EnvironmentsTransport): + """REST backend transport for Environments. + + Service for managing + [Environments][google.cloud.dialogflow.cx.v3.Environment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EnvironmentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or EnvironmentsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateEnvironment(EnvironmentsRestStub): + def __hash__(self): + return hash("CreateEnvironment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_environment.CreateEnvironmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create environment method over HTTP. + + Args: + request (~.gcdc_environment.CreateEnvironmentRequest): + The request object. 
The request message for + [Environments.CreateEnvironment][google.cloud.dialogflow.cx.v3.Environments.CreateEnvironment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/environments", + "body": "environment", + }, + ] + request, metadata = self._interceptor.pre_create_environment( + request, metadata + ) + pb_request = gcdc_environment.CreateEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_environment(resp) + return resp + + class _DeleteEnvironment(EnvironmentsRestStub): + def __hash__(self): + return hash("DeleteEnvironment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.DeleteEnvironmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete environment method over HTTP. + + Args: + request (~.environment.DeleteEnvironmentRequest): + The request object. The request message for + [Environments.DeleteEnvironment][google.cloud.dialogflow.cx.v3.Environments.DeleteEnvironment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_environment( + request, metadata + ) + pb_request = environment.DeleteEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeployFlow(EnvironmentsRestStub): + def __hash__(self): + return hash("DeployFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.DeployFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the deploy flow method over HTTP. + + Args: + request (~.environment.DeployFlowRequest): + The request object. 
The request message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{environment=projects/*/locations/*/agents/*/environments/*}:deployFlow", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_deploy_flow(request, metadata) + pb_request = environment.DeployFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deploy_flow(resp) + return resp + + class _GetEnvironment(EnvironmentsRestStub): + def __hash__(self): + return hash("GetEnvironment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.GetEnvironmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.Environment: + r"""Call the get environment method over HTTP. + + Args: + request (~.environment.GetEnvironmentRequest): + The request object. The request message for + [Environments.GetEnvironment][google.cloud.dialogflow.cx.v3.Environments.GetEnvironment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environment.Environment: + Represents an environment for an + agent. You can create multiple versions + of your agent and publish them to + separate environments. When you edit an + agent, you are editing the draft agent. + At any point, you can save the draft + agent as an agent version, which is an + immutable snapshot of your agent. When + you save the draft agent, it is + published to the default environment. + When you create agent versions, you can + publish them to custom environments. You + can create a variety of custom + environments for testing, development, + production, etc. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_environment(request, metadata) + pb_request = environment.GetEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environment.Environment() + pb_resp = environment.Environment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_environment(resp) + return resp + + class _ListContinuousTestResults(EnvironmentsRestStub): + def __hash__(self): + return hash("ListContinuousTestResults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.ListContinuousTestResultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.ListContinuousTestResultsResponse: + r"""Call the list continuous test + results method over HTTP. + + Args: + request (~.environment.ListContinuousTestResultsRequest): + The request object. The request message for + [Environments.ListContinuousTestResults][google.cloud.dialogflow.cx.v3.Environments.ListContinuousTestResults]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environment.ListContinuousTestResultsResponse: + The response message for + [Environments.ListTestCaseResults][]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/environments/*}/continuousTestResults", + }, + ] + request, metadata = self._interceptor.pre_list_continuous_test_results( + request, metadata + ) + pb_request = environment.ListContinuousTestResultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environment.ListContinuousTestResultsResponse() + pb_resp = environment.ListContinuousTestResultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_continuous_test_results(resp) + return resp + + class _ListEnvironments(EnvironmentsRestStub): + def __hash__(self): + return hash("ListEnvironments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.ListEnvironmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.ListEnvironmentsResponse: + r"""Call the list environments method over HTTP. + + Args: + request (~.environment.ListEnvironmentsRequest): + The request object. The request message for + [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environment.ListEnvironmentsResponse: + The response message for + [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/environments", + }, + ] + request, metadata = self._interceptor.pre_list_environments( + request, metadata + ) + pb_request = environment.ListEnvironmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environment.ListEnvironmentsResponse() + pb_resp = environment.ListEnvironmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_environments(resp) + return resp + + class _LookupEnvironmentHistory(EnvironmentsRestStub): + def __hash__(self): + return hash("LookupEnvironmentHistory") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.LookupEnvironmentHistoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environment.LookupEnvironmentHistoryResponse: + r"""Call the lookup environment + history method over HTTP. + + Args: + request (~.environment.LookupEnvironmentHistoryRequest): + The request object. The request message for + [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environment.LookupEnvironmentHistoryResponse: + The response message for + [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*}:lookupEnvironmentHistory", + }, + ] + request, metadata = self._interceptor.pre_lookup_environment_history( + request, metadata + ) + pb_request = environment.LookupEnvironmentHistoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environment.LookupEnvironmentHistoryResponse() + pb_resp = environment.LookupEnvironmentHistoryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup_environment_history(resp) + return resp + + class _RunContinuousTest(EnvironmentsRestStub): + def __hash__(self): + return hash("RunContinuousTest") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environment.RunContinuousTestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the run continuous test method over HTTP. + + Args: + request (~.environment.RunContinuousTestRequest): + The request object. The request message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{environment=projects/*/locations/*/agents/*/environments/*}:runContinuousTest", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_continuous_test( + request, metadata + ) + pb_request = environment.RunContinuousTestRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_continuous_test(resp) + return resp + + class _UpdateEnvironment(EnvironmentsRestStub): + def __hash__(self): + return hash("UpdateEnvironment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_environment.UpdateEnvironmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update environment method over HTTP. + + Args: + request (~.gcdc_environment.UpdateEnvironmentRequest): + The request object. The request message for + [Environments.UpdateEnvironment][google.cloud.dialogflow.cx.v3.Environments.UpdateEnvironment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{environment.name=projects/*/locations/*/agents/*/environments/*}", + "body": "environment", + }, + ] + request, metadata = self._interceptor.pre_update_environment( + request, metadata + ) + pb_request = gcdc_environment.UpdateEnvironmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_environment(resp) + return resp + + @property + def create_environment( + self, + ) -> Callable[ + [gcdc_environment.CreateEnvironmentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_environment( + self, + ) -> Callable[[environment.DeleteEnvironmentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def deploy_flow( + self, + ) -> Callable[[environment.DeployFlowRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeployFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_environment( + self, + ) -> Callable[[environment.GetEnvironmentRequest], environment.Environment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_continuous_test_results( + self, + ) -> Callable[ + [environment.ListContinuousTestResultsRequest], + environment.ListContinuousTestResultsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListContinuousTestResults(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_environments( + self, + ) -> Callable[ + [environment.ListEnvironmentsRequest], environment.ListEnvironmentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListEnvironments(self._session, self._host, self._interceptor) # type: ignore + + @property + def lookup_environment_history( + self, + ) -> Callable[ + [environment.LookupEnvironmentHistoryRequest], + environment.LookupEnvironmentHistoryResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LookupEnvironmentHistory(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_continuous_test( + self, + ) -> Callable[[environment.RunContinuousTestRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunContinuousTest(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_environment( + self, + ) -> Callable[ + [gcdc_environment.UpdateEnvironmentRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEnvironment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(EnvironmentsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(EnvironmentsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EnvironmentsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EnvironmentsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(EnvironmentsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EnvironmentsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/__init__.py new file mode 100644 index 000000000000..6d770bc6f10e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ExperimentsAsyncClient +from .client import ExperimentsClient + +__all__ = ( + "ExperimentsClient", + "ExperimentsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py new file mode 100644 index 000000000000..26e36d7c896b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/async_client.py @@ -0,0 +1,1269 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.experiments import pagers +from google.cloud.dialogflowcx_v3.types import experiment +from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment + +from .client import ExperimentsClient +from .transports.base import DEFAULT_CLIENT_INFO, ExperimentsTransport +from .transports.grpc_asyncio import ExperimentsGrpcAsyncIOTransport + + +class ExperimentsAsyncClient: + """Service for managing + [Experiments][google.cloud.dialogflow.cx.v3.Experiment]. 
+ """ + + _client: ExperimentsClient + + DEFAULT_ENDPOINT = ExperimentsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ExperimentsClient.DEFAULT_MTLS_ENDPOINT + + experiment_path = staticmethod(ExperimentsClient.experiment_path) + parse_experiment_path = staticmethod(ExperimentsClient.parse_experiment_path) + version_path = staticmethod(ExperimentsClient.version_path) + parse_version_path = staticmethod(ExperimentsClient.parse_version_path) + common_billing_account_path = staticmethod( + ExperimentsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ExperimentsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ExperimentsClient.common_folder_path) + parse_common_folder_path = staticmethod(ExperimentsClient.parse_common_folder_path) + common_organization_path = staticmethod(ExperimentsClient.common_organization_path) + parse_common_organization_path = staticmethod( + ExperimentsClient.parse_common_organization_path + ) + common_project_path = staticmethod(ExperimentsClient.common_project_path) + parse_common_project_path = staticmethod( + ExperimentsClient.parse_common_project_path + ) + common_location_path = staticmethod(ExperimentsClient.common_location_path) + parse_common_location_path = staticmethod( + ExperimentsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExperimentsAsyncClient: The constructed client. 
+ """ + return ExperimentsClient.from_service_account_info.__func__(ExperimentsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExperimentsAsyncClient: The constructed client. + """ + return ExperimentsClient.from_service_account_file.__func__(ExperimentsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ExperimentsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ExperimentsTransport: + """Returns the transport used by the client instance. + + Returns: + ExperimentsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ExperimentsClient).get_transport_class, type(ExperimentsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ExperimentsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the experiments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ExperimentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ExperimentsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_experiments( + self, + request: Optional[Union[experiment.ListExperimentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExperimentsAsyncPager: + r"""Returns the list of all experiments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_experiments(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListExperimentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_experiments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListExperimentsRequest, dict]]): + The request object. The request message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + parent (:class:`str`): + Required. The + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to list all environments for. Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.experiments.pagers.ListExperimentsAsyncPager: + The response message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = experiment.ListExperimentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_experiments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExperimentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_experiment( + self, + request: Optional[Union[experiment.GetExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Retrieves the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetExperimentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetExperimentRequest, dict]]): + The request object. The request message for + [Experiments.GetExperiment][google.cloud.dialogflow.cx.v3.Experiments.GetExperiment]. + name (:class:`str`): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = experiment.GetExperimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_experiment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_experiment( + self, + request: Optional[Union[gcdc_experiment.CreateExperimentRequest, dict]] = None, + *, + parent: Optional[str] = None, + experiment: Optional[gcdc_experiment.Experiment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_experiment.Experiment: + r"""Creates an + [Experiment][google.cloud.dialogflow.cx.v3.Experiment] in the + specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + experiment = dialogflowcx_v3.Experiment() + experiment.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateExperimentRequest( + parent="parent_value", + experiment=experiment, + ) + + # Make the request + response = await client.create_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateExperimentRequest, dict]]): + The request object. The request message for + [Experiments.CreateExperiment][google.cloud.dialogflow.cx.v3.Experiments.CreateExperiment]. + parent (:class:`str`): + Required. The + [Agent][google.cloud.dialogflow.cx.v3.Agent] to create + an + [Environment][google.cloud.dialogflow.cx.v3.Environment] + for. Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + experiment (:class:`google.cloud.dialogflowcx_v3.types.Experiment`): + Required. The experiment to create. + This corresponds to the ``experiment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, experiment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_experiment.CreateExperimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if experiment is not None: + request.experiment = experiment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_experiment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_experiment( + self, + request: Optional[Union[gcdc_experiment.UpdateExperimentRequest, dict]] = None, + *, + experiment: Optional[gcdc_experiment.Experiment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_experiment.Experiment: + r"""Updates the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + experiment = dialogflowcx_v3.Experiment() + experiment.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateExperimentRequest( + experiment=experiment, + ) + + # Make the request + response = await client.update_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateExperimentRequest, dict]]): + The request object. The request message for + [Experiments.UpdateExperiment][google.cloud.dialogflow.cx.v3.Experiments.UpdateExperiment]. + experiment (:class:`google.cloud.dialogflowcx_v3.types.Experiment`): + Required. The experiment to update. + This corresponds to the ``experiment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The mask to control which + fields get updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([experiment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_experiment.UpdateExperimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if experiment is not None: + request.experiment = experiment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_experiment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("experiment.name", request.experiment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_experiment( + self, + request: Optional[Union[experiment.DeleteExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteExperimentRequest( + name="name_value", + ) + + # Make the request + await client.delete_experiment(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteExperimentRequest, dict]]): + The request object. The request message for + [Experiments.DeleteExperiment][google.cloud.dialogflow.cx.v3.Experiments.DeleteExperiment]. + name (:class:`str`): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to delete. Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = experiment.DeleteExperimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_experiment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def start_experiment( + self, + request: Optional[Union[experiment.StartExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Starts the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc + only changes the state of experiment from PENDING to RUNNING. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_start_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.StartExperimentRequest( + name="name_value", + ) + + # Make the request + response = await client.start_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.StartExperimentRequest, dict]]): + The request object. The request message for + [Experiments.StartExperiment][google.cloud.dialogflow.cx.v3.Experiments.StartExperiment]. + name (:class:`str`): + Required. Resource name of the experiment to start. + Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = experiment.StartExperimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_experiment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def stop_experiment( + self, + request: Optional[Union[experiment.StopExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Stops the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc + only changes the state of experiment from RUNNING to DONE. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_stop_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.StopExperimentRequest( + name="name_value", + ) + + # Make the request + response = await client.stop_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.StopExperimentRequest, dict]]): + The request object. The request message for + [Experiments.StopExperiment][google.cloud.dialogflow.cx.v3.Experiments.StopExperiment]. + name (:class:`str`): + Required. Resource name of the experiment to stop. + Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = experiment.StopExperimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_experiment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "ExperimentsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ExperimentsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py new file mode 100644 index 000000000000..5cc1888b1d5b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py @@ -0,0 +1,1535 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.experiments import pagers +from google.cloud.dialogflowcx_v3.types import experiment +from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment + +from .transports.base import DEFAULT_CLIENT_INFO, ExperimentsTransport +from .transports.grpc import ExperimentsGrpcTransport +from .transports.grpc_asyncio import ExperimentsGrpcAsyncIOTransport +from .transports.rest import ExperimentsRestTransport + + +class ExperimentsClientMeta(type): + """Metaclass for the Experiments client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ExperimentsTransport]] + _transport_registry["grpc"] = ExperimentsGrpcTransport + _transport_registry["grpc_asyncio"] = ExperimentsGrpcAsyncIOTransport + _transport_registry["rest"] = ExperimentsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ExperimentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ExperimentsClient(metaclass=ExperimentsClientMeta): + """Service for managing + [Experiments][google.cloud.dialogflow.cx.v3.Experiment]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExperimentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExperimentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ExperimentsTransport: + """Returns the transport used by the client instance. + + Returns: + ExperimentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def experiment_path( + project: str, + location: str, + agent: str, + environment: str, + experiment: str, + ) -> str: + """Returns a fully-qualified experiment string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/experiments/{experiment}".format( + project=project, + location=location, + agent=agent, + environment=environment, + experiment=experiment, + ) + + @staticmethod + def parse_experiment_path(path: str) -> Dict[str, str]: + """Parses a experiment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)/experiments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def version_path( + project: str, + location: str, + agent: str, + flow: str, + version: str, + ) -> str: + """Returns a fully-qualified version string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/versions/{version}".format( + project=project, + location=location, + agent=agent, + flow=flow, + version=version, + ) + + @staticmethod + def parse_version_path(path: str) -> Dict[str, str]: + """Parses a version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/versions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a 
fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ExperimentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the experiments client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ExperimentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ExperimentsTransport): + # transport is a ExperimentsTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_experiments( + self, + request: Optional[Union[experiment.ListExperimentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExperimentsPager: + r"""Returns the list of all experiments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_experiments(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListExperimentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_experiments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListExperimentsRequest, dict]): + The request object. The request message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + parent (str): + Required. The + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to list all environments for. Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.experiments.pagers.ListExperimentsPager: + The response message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a experiment.ListExperimentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, experiment.ListExperimentsRequest): + request = experiment.ListExperimentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_experiments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExperimentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_experiment( + self, + request: Optional[Union[experiment.GetExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Retrieves the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetExperimentRequest( + name="name_value", + ) + + # Make the request + response = client.get_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetExperimentRequest, dict]): + The request object. The request message for + [Experiments.GetExperiment][google.cloud.dialogflow.cx.v3.Experiments.GetExperiment]. + name (str): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a experiment.GetExperimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, experiment.GetExperimentRequest): + request = experiment.GetExperimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_experiment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_experiment( + self, + request: Optional[Union[gcdc_experiment.CreateExperimentRequest, dict]] = None, + *, + parent: Optional[str] = None, + experiment: Optional[gcdc_experiment.Experiment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_experiment.Experiment: + r"""Creates an + [Experiment][google.cloud.dialogflow.cx.v3.Experiment] in the + specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + experiment = dialogflowcx_v3.Experiment() + experiment.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateExperimentRequest( + parent="parent_value", + experiment=experiment, + ) + + # Make the request + response = client.create_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateExperimentRequest, dict]): + The request object. The request message for + [Experiments.CreateExperiment][google.cloud.dialogflow.cx.v3.Experiments.CreateExperiment]. + parent (str): + Required. The + [Agent][google.cloud.dialogflow.cx.v3.Agent] to create + an + [Environment][google.cloud.dialogflow.cx.v3.Environment] + for. Format: + ``projects//locations//agents//environments/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + experiment (google.cloud.dialogflowcx_v3.types.Experiment): + Required. The experiment to create. + This corresponds to the ``experiment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, experiment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_experiment.CreateExperimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_experiment.CreateExperimentRequest): + request = gcdc_experiment.CreateExperimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if experiment is not None: + request.experiment = experiment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_experiment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_experiment( + self, + request: Optional[Union[gcdc_experiment.UpdateExperimentRequest, dict]] = None, + *, + experiment: Optional[gcdc_experiment.Experiment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_experiment.Experiment: + r"""Updates the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + experiment = dialogflowcx_v3.Experiment() + experiment.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateExperimentRequest( + experiment=experiment, + ) + + # Make the request + response = client.update_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateExperimentRequest, dict]): + The request object. The request message for + [Experiments.UpdateExperiment][google.cloud.dialogflow.cx.v3.Experiments.UpdateExperiment]. + experiment (google.cloud.dialogflowcx_v3.types.Experiment): + Required. The experiment to update. + This corresponds to the ``experiment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. 
The mask to control which + fields get updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([experiment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_experiment.UpdateExperimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_experiment.UpdateExperimentRequest): + request = gcdc_experiment.UpdateExperimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if experiment is not None: + request.experiment = experiment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_experiment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("experiment.name", request.experiment.name),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_experiment( + self, + request: Optional[Union[experiment.DeleteExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteExperimentRequest( + name="name_value", + ) + + # Make the request + client.delete_experiment(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteExperimentRequest, dict]): + The request object. The request message for + [Experiments.DeleteExperiment][google.cloud.dialogflow.cx.v3.Experiments.DeleteExperiment]. + name (str): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment] + to delete. Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a experiment.DeleteExperimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, experiment.DeleteExperimentRequest): + request = experiment.DeleteExperimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_experiment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def start_experiment( + self, + request: Optional[Union[experiment.StartExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Starts the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc + only changes the state of experiment from PENDING to RUNNING. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_start_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.StartExperimentRequest( + name="name_value", + ) + + # Make the request + response = client.start_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.StartExperimentRequest, dict]): + The request object. The request message for + [Experiments.StartExperiment][google.cloud.dialogflow.cx.v3.Experiments.StartExperiment]. + name (str): + Required. Resource name of the experiment to start. + Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a experiment.StartExperimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, experiment.StartExperimentRequest): + request = experiment.StartExperimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_experiment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stop_experiment( + self, + request: Optional[Union[experiment.StopExperimentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Stops the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc + only changes the state of experiment from RUNNING to DONE. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_stop_experiment(): + # Create a client + client = dialogflowcx_v3.ExperimentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.StopExperimentRequest( + name="name_value", + ) + + # Make the request + response = client.stop_experiment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.StopExperimentRequest, dict]): + The request object. The request message for + [Experiments.StopExperiment][google.cloud.dialogflow.cx.v3.Experiments.StopExperiment]. + name (str): + Required. Resource name of the experiment to stop. + Format: + ``projects//locations//agents//environments//experiments/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Experiment: + Represents an experiment in an + environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a experiment.StopExperimentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, experiment.StopExperimentRequest): + request = experiment.StopExperimentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_experiment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ExperimentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ExperimentsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/pagers.py new file mode 100644 index 000000000000..cf1503853f1f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import experiment + + +class ListExperimentsPager: + """A pager for iterating through ``list_experiments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListExperimentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``experiments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExperiments`` requests and continue to iterate + through the ``experiments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListExperimentsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., experiment.ListExperimentsResponse], + request: experiment.ListExperimentsRequest, + response: experiment.ListExperimentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListExperimentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListExperimentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = experiment.ListExperimentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[experiment.ListExperimentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[experiment.Experiment]: + for page in self.pages: + yield from page.experiments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExperimentsAsyncPager: + """A pager for iterating through ``list_experiments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListExperimentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``experiments`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListExperiments`` requests and continue to iterate + through the ``experiments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListExperimentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[experiment.ListExperimentsResponse]], + request: experiment.ListExperimentsRequest, + response: experiment.ListExperimentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListExperimentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListExperimentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = experiment.ListExperimentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[experiment.ListExperimentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[experiment.Experiment]: + async def async_generator(): + async for page in self.pages: + for response in page.experiments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/__init__.py new file mode 100644 index 000000000000..c0693dbff0fa --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ExperimentsTransport +from .grpc import ExperimentsGrpcTransport +from .grpc_asyncio import ExperimentsGrpcAsyncIOTransport +from .rest import ExperimentsRestInterceptor, ExperimentsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ExperimentsTransport]] +_transport_registry["grpc"] = ExperimentsGrpcTransport +_transport_registry["grpc_asyncio"] = ExperimentsGrpcAsyncIOTransport +_transport_registry["rest"] = ExperimentsRestTransport + +__all__ = ( + "ExperimentsTransport", + "ExperimentsGrpcTransport", + "ExperimentsGrpcAsyncIOTransport", + "ExperimentsRestTransport", + "ExperimentsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/base.py new file mode 100644 index 000000000000..27e76e858a49 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/base.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import experiment +from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ExperimentsTransport(abc.ABC): + """Abstract transport class for Experiments.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_experiments: gapic_v1.method.wrap_method( + self.list_experiments, + default_timeout=None, + client_info=client_info, + ), + self.get_experiment: gapic_v1.method.wrap_method( + self.get_experiment, + default_timeout=None, + client_info=client_info, + ), + self.create_experiment: gapic_v1.method.wrap_method( + self.create_experiment, + default_timeout=None, + client_info=client_info, + ), + self.update_experiment: gapic_v1.method.wrap_method( + self.update_experiment, + default_timeout=None, + client_info=client_info, + ), + self.delete_experiment: gapic_v1.method.wrap_method( + self.delete_experiment, + default_timeout=None, + client_info=client_info, + ), + self.start_experiment: gapic_v1.method.wrap_method( + self.start_experiment, + default_timeout=None, + client_info=client_info, + ), + self.stop_experiment: gapic_v1.method.wrap_method( + self.stop_experiment, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_experiments( + self, + ) -> Callable[ + [experiment.ListExperimentsRequest], + Union[ + experiment.ListExperimentsResponse, + Awaitable[experiment.ListExperimentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_experiment( + self, + ) -> Callable[ + [experiment.GetExperimentRequest], + Union[experiment.Experiment, Awaitable[experiment.Experiment]], + ]: + raise NotImplementedError() + + @property + def create_experiment( + self, + ) -> Callable[ + [gcdc_experiment.CreateExperimentRequest], + Union[gcdc_experiment.Experiment, Awaitable[gcdc_experiment.Experiment]], + ]: + raise NotImplementedError() + + @property + def update_experiment( + self, + ) -> Callable[ + [gcdc_experiment.UpdateExperimentRequest], + Union[gcdc_experiment.Experiment, Awaitable[gcdc_experiment.Experiment]], + ]: + raise NotImplementedError() + + @property + def delete_experiment( + self, + ) -> Callable[ + [experiment.DeleteExperimentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def start_experiment( + self, + ) -> Callable[ + [experiment.StartExperimentRequest], + Union[experiment.Experiment, Awaitable[experiment.Experiment]], + ]: + raise NotImplementedError() + + @property + def stop_experiment( + self, + ) -> Callable[ + [experiment.StopExperimentRequest], + Union[experiment.Experiment, Awaitable[experiment.Experiment]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def 
cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ExperimentsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/grpc.py new file mode 100644 index 000000000000..afca24cad907 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/grpc.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import experiment +from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment + +from .base import DEFAULT_CLIENT_INFO, ExperimentsTransport + + +class ExperimentsGrpcTransport(ExperimentsTransport): + """gRPC backend transport for Experiments. + + Service for managing + [Experiments][google.cloud.dialogflow.cx.v3.Experiment]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated.
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_experiments( + self, + ) -> Callable[ + [experiment.ListExperimentsRequest], experiment.ListExperimentsResponse + ]: + r"""Return a callable for the list experiments method over gRPC. + + Returns the list of all experiments in the specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.ListExperimentsRequest], + ~.ListExperimentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
+ if "list_experiments" not in self._stubs: + self._stubs["list_experiments"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/ListExperiments", + request_serializer=experiment.ListExperimentsRequest.serialize, + response_deserializer=experiment.ListExperimentsResponse.deserialize, + ) + return self._stubs["list_experiments"] + + @property + def get_experiment( + self, + ) -> Callable[[experiment.GetExperimentRequest], experiment.Experiment]: + r"""Return a callable for the get experiment method over gRPC. + + Retrieves the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + Returns: + Callable[[~.GetExperimentRequest], + ~.Experiment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_experiment" not in self._stubs: + self._stubs["get_experiment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/GetExperiment", + request_serializer=experiment.GetExperimentRequest.serialize, + response_deserializer=experiment.Experiment.deserialize, + ) + return self._stubs["get_experiment"] + + @property + def create_experiment( + self, + ) -> Callable[ + [gcdc_experiment.CreateExperimentRequest], gcdc_experiment.Experiment + ]: + r"""Return a callable for the create experiment method over gRPC. + + Creates an + [Experiment][google.cloud.dialogflow.cx.v3.Experiment] in the + specified + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + + Returns: + Callable[[~.CreateExperimentRequest], + ~.Experiment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_experiment" not in self._stubs: + self._stubs["create_experiment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/CreateExperiment", + request_serializer=gcdc_experiment.CreateExperimentRequest.serialize, + response_deserializer=gcdc_experiment.Experiment.deserialize, + ) + return self._stubs["create_experiment"] + + @property + def update_experiment( + self, + ) -> Callable[ + [gcdc_experiment.UpdateExperimentRequest], gcdc_experiment.Experiment + ]: + r"""Return a callable for the update experiment method over gRPC. + + Updates the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + Returns: + Callable[[~.UpdateExperimentRequest], + ~.Experiment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_experiment" not in self._stubs: + self._stubs["update_experiment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/UpdateExperiment", + request_serializer=gcdc_experiment.UpdateExperimentRequest.serialize, + response_deserializer=gcdc_experiment.Experiment.deserialize, + ) + return self._stubs["update_experiment"] + + @property + def delete_experiment( + self, + ) -> Callable[[experiment.DeleteExperimentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete experiment method over gRPC. + + Deletes the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. + + Returns: + Callable[[~.DeleteExperimentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_experiment" not in self._stubs: + self._stubs["delete_experiment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/DeleteExperiment", + request_serializer=experiment.DeleteExperimentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_experiment"] + + @property + def start_experiment( + self, + ) -> Callable[[experiment.StartExperimentRequest], experiment.Experiment]: + r"""Return a callable for the start experiment method over gRPC. + + Starts the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc + only changes the state of experiment from PENDING to RUNNING. + + Returns: + Callable[[~.StartExperimentRequest], + ~.Experiment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_experiment" not in self._stubs: + self._stubs["start_experiment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/StartExperiment", + request_serializer=experiment.StartExperimentRequest.serialize, + response_deserializer=experiment.Experiment.deserialize, + ) + return self._stubs["start_experiment"] + + @property + def stop_experiment( + self, + ) -> Callable[[experiment.StopExperimentRequest], experiment.Experiment]: + r"""Return a callable for the stop experiment method over gRPC. + + Stops the specified + [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc + only changes the state of experiment from RUNNING to DONE. + + Returns: + Callable[[~.StopExperimentRequest], + ~.Experiment]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_experiment" not in self._stubs: + self._stubs["stop_experiment"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Experiments/StopExperiment", + request_serializer=experiment.StopExperimentRequest.serialize, + response_deserializer=experiment.Experiment.deserialize, + ) + return self._stubs["stop_experiment"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ExperimentsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e3b58e8a04e2 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/grpc_asyncio.py @@ -0,0 +1,534 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, grpc_helpers_async
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.dialogflowcx_v3.types import experiment
from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment

from .base import DEFAULT_CLIENT_INFO, ExperimentsTransport
from .grpc import ExperimentsGrpcTransport


class ExperimentsGrpcAsyncIOTransport(ExperimentsTransport):
    """gRPC AsyncIO backend transport for Experiments.

    Service for managing
    [Experiments][google.cloud.dialogflow.cx.v3.Experiment].

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    _grpc_channel: aio.Channel
    # NOTE(review): class-level declaration only; every instance gets its
    # own ``_stubs`` dict in ``__init__``, so this shared default is never
    # mutated in practice.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(
        cls,
        host: str = "dialogflow.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    def __init__(
        self,
        *,
        host: str = "dialogflow.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[aio.Channel] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # NOTE(review): ``False`` is used as a sentinel here (distinct
            # from ``None``) — presumably so the base transport skips
            # credential resolution entirely; confirm against
            # ExperimentsTransport.__init__ before changing.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def list_experiments(
        self,
    ) -> Callable[
        [experiment.ListExperimentsRequest],
        Awaitable[experiment.ListExperimentsResponse],
    ]:
        r"""Return a callable for the list experiments method over gRPC.

        Returns the list of all experiments in the specified
        [Environment][google.cloud.dialogflow.cx.v3.Environment].

        Returns:
            Callable[[~.ListExperimentsRequest],
                    Awaitable[~.ListExperimentsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_experiments" not in self._stubs:
            self._stubs["list_experiments"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/ListExperiments",
                request_serializer=experiment.ListExperimentsRequest.serialize,
                response_deserializer=experiment.ListExperimentsResponse.deserialize,
            )
        return self._stubs["list_experiments"]

    @property
    def get_experiment(
        self,
    ) -> Callable[[experiment.GetExperimentRequest], Awaitable[experiment.Experiment]]:
        r"""Return a callable for the get experiment method over gRPC.

        Retrieves the specified
        [Experiment][google.cloud.dialogflow.cx.v3.Experiment].

        Returns:
            Callable[[~.GetExperimentRequest],
                    Awaitable[~.Experiment]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_experiment" not in self._stubs:
            self._stubs["get_experiment"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/GetExperiment",
                request_serializer=experiment.GetExperimentRequest.serialize,
                response_deserializer=experiment.Experiment.deserialize,
            )
        return self._stubs["get_experiment"]

    @property
    def create_experiment(
        self,
    ) -> Callable[
        [gcdc_experiment.CreateExperimentRequest], Awaitable[gcdc_experiment.Experiment]
    ]:
        r"""Return a callable for the create experiment method over gRPC.

        Creates an
        [Experiment][google.cloud.dialogflow.cx.v3.Experiment] in the
        specified
        [Environment][google.cloud.dialogflow.cx.v3.Environment].

        Returns:
            Callable[[~.CreateExperimentRequest],
                    Awaitable[~.Experiment]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "create_experiment" not in self._stubs:
            self._stubs["create_experiment"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/CreateExperiment",
                request_serializer=gcdc_experiment.CreateExperimentRequest.serialize,
                response_deserializer=gcdc_experiment.Experiment.deserialize,
            )
        return self._stubs["create_experiment"]

    @property
    def update_experiment(
        self,
    ) -> Callable[
        [gcdc_experiment.UpdateExperimentRequest], Awaitable[gcdc_experiment.Experiment]
    ]:
        r"""Return a callable for the update experiment method over gRPC.

        Updates the specified
        [Experiment][google.cloud.dialogflow.cx.v3.Experiment].

        Returns:
            Callable[[~.UpdateExperimentRequest],
                    Awaitable[~.Experiment]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "update_experiment" not in self._stubs:
            self._stubs["update_experiment"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/UpdateExperiment",
                request_serializer=gcdc_experiment.UpdateExperimentRequest.serialize,
                response_deserializer=gcdc_experiment.Experiment.deserialize,
            )
        return self._stubs["update_experiment"]

    @property
    def delete_experiment(
        self,
    ) -> Callable[[experiment.DeleteExperimentRequest], Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the delete experiment method over gRPC.

        Deletes the specified
        [Experiment][google.cloud.dialogflow.cx.v3.Experiment].

        Returns:
            Callable[[~.DeleteExperimentRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_experiment" not in self._stubs:
            self._stubs["delete_experiment"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/DeleteExperiment",
                request_serializer=experiment.DeleteExperimentRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs["delete_experiment"]

    @property
    def start_experiment(
        self,
    ) -> Callable[
        [experiment.StartExperimentRequest], Awaitable[experiment.Experiment]
    ]:
        r"""Return a callable for the start experiment method over gRPC.

        Starts the specified
        [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc
        only changes the state of experiment from PENDING to RUNNING.

        Returns:
            Callable[[~.StartExperimentRequest],
                    Awaitable[~.Experiment]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "start_experiment" not in self._stubs:
            self._stubs["start_experiment"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/StartExperiment",
                request_serializer=experiment.StartExperimentRequest.serialize,
                response_deserializer=experiment.Experiment.deserialize,
            )
        return self._stubs["start_experiment"]

    @property
    def stop_experiment(
        self,
    ) -> Callable[[experiment.StopExperimentRequest], Awaitable[experiment.Experiment]]:
        r"""Return a callable for the stop experiment method over gRPC.

        Stops the specified
        [Experiment][google.cloud.dialogflow.cx.v3.Experiment]. This rpc
        only changes the state of experiment from RUNNING to DONE.

        Returns:
            Callable[[~.StopExperimentRequest],
                    Awaitable[~.Experiment]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "stop_experiment" not in self._stubs:
            self._stubs["stop_experiment"] = self.grpc_channel.unary_unary(
                "/google.cloud.dialogflow.cx.v3.Experiments/StopExperiment",
                request_serializer=experiment.StopExperimentRequest.serialize,
                response_deserializer=experiment.Experiment.deserialize,
            )
        return self._stubs["stop_experiment"]

    def close(self):
        """Close the transport's gRPC channel."""
        return self.grpc_channel.close()

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def list_locations(
        self,
    ) -> Callable[
        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
    ]:
        r"""Return a callable for the list locations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_locations" not in self._stubs:
            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
                "/google.cloud.location.Locations/ListLocations",
                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
            )
        return self._stubs["list_locations"]

    @property
    def get_location(
        self,
    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
        # Docstring fixed: the original said "list locations" (copy-paste
        # from the property above); this stub is for GetLocation.
        r"""Return a callable for the get location method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_location" not in self._stubs:
            self._stubs["get_location"] = self.grpc_channel.unary_unary(
                "/google.cloud.location.Locations/GetLocation",
                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
                response_deserializer=locations_pb2.Location.FromString,
            )
        return self._stubs["get_location"]


__all__ = ("ExperimentsGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py
new file mode 100644
index 000000000000..29d348c1ead8
--- /dev/null
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py
@@ -0,0 +1,1560 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#

import dataclasses
import json  # type: ignore
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.transport.requests import AuthorizedSession  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.protobuf import json_format
import grpc  # type: ignore
from requests import __version__ as requests_version

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

from google.cloud.dialogflowcx_v3.types import experiment
from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment

from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
from .base import ExperimentsTransport

# Client info advertised by the REST transport: same GAPIC version as the
# base transport, no gRPC, and the installed ``requests`` version.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class ExperimentsRestInterceptor:
    """Interceptor for Experiments.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the ExperimentsRestTransport.

    .. code-block:: python
        class MyCustomExperimentsInterceptor(ExperimentsRestInterceptor):
            def pre_create_experiment(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_create_experiment(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_delete_experiment(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def pre_get_experiment(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get_experiment(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_list_experiments(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_list_experiments(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_start_experiment(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_start_experiment(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_stop_experiment(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_stop_experiment(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_update_experiment(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_update_experiment(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = ExperimentsRestTransport(interceptor=MyCustomExperimentsInterceptor())
        client = ExperimentsClient(transport=transport)


    """

    def pre_create_experiment(
        self,
        request: gcdc_experiment.CreateExperimentRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[gcdc_experiment.CreateExperimentRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for create_experiment

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_create_experiment(
        self, response: gcdc_experiment.Experiment
    ) -> gcdc_experiment.Experiment:
        """Post-rpc interceptor for create_experiment

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_delete_experiment(
        self,
        request: experiment.DeleteExperimentRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[experiment.DeleteExperimentRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for delete_experiment

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        # NOTE(review): no post_delete_experiment hook exists — presumably
        # because the RPC returns Empty; confirm against the generator
        # template before relying on this.
        return request, metadata

    def pre_get_experiment(
        self,
        request: experiment.GetExperimentRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[experiment.GetExperimentRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get_experiment

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_get_experiment(
        self, response: experiment.Experiment
    ) -> experiment.Experiment:
        """Post-rpc interceptor for get_experiment

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_list_experiments(
        self,
        request: experiment.ListExperimentsRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[experiment.ListExperimentsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list_experiments

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_list_experiments(
        self, response: experiment.ListExperimentsResponse
    ) -> experiment.ListExperimentsResponse:
        """Post-rpc interceptor for list_experiments

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_start_experiment(
        self,
        request: experiment.StartExperimentRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[experiment.StartExperimentRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for start_experiment

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_start_experiment(
        self, response: experiment.Experiment
    ) -> experiment.Experiment:
        """Post-rpc interceptor for start_experiment

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_stop_experiment(
        self,
        request: experiment.StopExperimentRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[experiment.StopExperimentRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for stop_experiment

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_stop_experiment(
        self, response: experiment.Experiment
    ) -> experiment.Experiment:
        """Post-rpc interceptor for stop_experiment

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_update_experiment(
        self,
        request: gcdc_experiment.UpdateExperimentRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[gcdc_experiment.UpdateExperimentRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for update_experiment

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_update_experiment(
        self, response: gcdc_experiment.Experiment
    ) -> gcdc_experiment.Experiment:
        """Post-rpc interceptor for update_experiment

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_get_location(
        self,
        request: locations_pb2.GetLocationRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get_location

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_get_location(
        self, response: locations_pb2.Location
    ) -> locations_pb2.Location:
        """Post-rpc interceptor for get_location

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_list_locations(
        self,
        request: locations_pb2.ListLocationsRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list_locations

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_list_locations(
        self, response: locations_pb2.ListLocationsResponse
    ) -> locations_pb2.ListLocationsResponse:
        """Post-rpc interceptor for list_locations

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_cancel_operation(
        self,
        request: operations_pb2.CancelOperationRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for cancel_operation

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_cancel_operation(self, response: None) -> None:
        """Post-rpc interceptor for cancel_operation

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_get_operation(
        self,
        request: operations_pb2.GetOperationRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get_operation

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_get_operation(
        self, response: operations_pb2.Operation
    ) -> operations_pb2.Operation:
        """Post-rpc interceptor for get_operation

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response

    def pre_list_operations(
        self,
        request: operations_pb2.ListOperationsRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list_operations

        Override in a subclass to manipulate the request or metadata
        before they are sent to the Experiments server.
        """
        return request, metadata

    def post_list_operations(
        self, response: operations_pb2.ListOperationsResponse
    ) -> operations_pb2.ListOperationsResponse:
        """Post-rpc interceptor for list_operations

        Override in a subclass to manipulate the response
        after it is returned by the Experiments server but before
        it is returned to user code.
        """
        return response


# Lightweight record shared by every per-RPC stub class: the authorized
# HTTP session, the resolved host, and the active interceptor.
@dataclasses.dataclass
class ExperimentsRestStub:
    _session: AuthorizedSession
    _host: str
    _interceptor: ExperimentsRestInterceptor


class ExperimentsRestTransport(ExperimentsTransport):
    """REST backend transport for Experiments.

    Service for managing
    [Experiments][google.cloud.dialogflow.cx.v3.Experiment].

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ExperimentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ExperimentsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateExperiment(ExperimentsRestStub): + def __hash__(self): + return hash("CreateExperiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_experiment.CreateExperimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_experiment.Experiment: + r"""Call the create experiment method over HTTP. + + Args: + request (~.gcdc_experiment.CreateExperimentRequest): + The request object. 
The request message for + [Experiments.CreateExperiment][google.cloud.dialogflow.cx.v3.Experiments.CreateExperiment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_experiment.Experiment: + Represents an experiment in an + environment. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/environments/*}/experiments", + "body": "experiment", + }, + ] + request, metadata = self._interceptor.pre_create_experiment( + request, metadata + ) + pb_request = gcdc_experiment.CreateExperimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_experiment.Experiment() + pb_resp = gcdc_experiment.Experiment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_experiment(resp) + return resp + + class _DeleteExperiment(ExperimentsRestStub): + def __hash__(self): + return hash("DeleteExperiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: experiment.DeleteExperimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete experiment method over HTTP. + + Args: + request (~.experiment.DeleteExperimentRequest): + The request object. The request message for + [Experiments.DeleteExperiment][google.cloud.dialogflow.cx.v3.Experiments.DeleteExperiment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/experiments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_experiment( + request, metadata + ) + pb_request = experiment.DeleteExperimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetExperiment(ExperimentsRestStub): + def __hash__(self): + return hash("GetExperiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: experiment.GetExperimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Call the get experiment method over HTTP. + + Args: + request (~.experiment.GetExperimentRequest): + The request object. 
The request message for + [Experiments.GetExperiment][google.cloud.dialogflow.cx.v3.Experiments.GetExperiment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.experiment.Experiment: + Represents an experiment in an + environment. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/experiments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_experiment(request, metadata) + pb_request = experiment.GetExperimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = experiment.Experiment() + pb_resp = experiment.Experiment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_experiment(resp) + return resp + + class _ListExperiments(ExperimentsRestStub): + def __hash__(self): + return hash("ListExperiments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: experiment.ListExperimentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.ListExperimentsResponse: + r"""Call the list experiments method over HTTP. + + Args: + request (~.experiment.ListExperimentsRequest): + The request object. The request message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.experiment.ListExperimentsResponse: + The response message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/environments/*}/experiments", + }, + ] + request, metadata = self._interceptor.pre_list_experiments( + request, metadata + ) + pb_request = experiment.ListExperimentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = experiment.ListExperimentsResponse() + pb_resp = experiment.ListExperimentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_experiments(resp) + return resp + + class _StartExperiment(ExperimentsRestStub): + def __hash__(self): + return hash("StartExperiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: experiment.StartExperimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Call the start experiment method over HTTP. + + Args: + request (~.experiment.StartExperimentRequest): + The request object. The request message for + [Experiments.StartExperiment][google.cloud.dialogflow.cx.v3.Experiments.StartExperiment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.experiment.Experiment: + Represents an experiment in an + environment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/experiments/*}:start", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_experiment( + request, metadata + ) + pb_request = experiment.StartExperimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = experiment.Experiment() + pb_resp = experiment.Experiment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_experiment(resp) + return resp + + class _StopExperiment(ExperimentsRestStub): + def __hash__(self): + return hash("StopExperiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: experiment.StopExperimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> experiment.Experiment: + r"""Call the stop experiment method over HTTP. + + Args: + request (~.experiment.StopExperimentRequest): + The request object. The request message for + [Experiments.StopExperiment][google.cloud.dialogflow.cx.v3.Experiments.StopExperiment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.experiment.Experiment: + Represents an experiment in an + environment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/experiments/*}:stop", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_experiment(request, metadata) + pb_request = experiment.StopExperimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = experiment.Experiment() + pb_resp = experiment.Experiment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_experiment(resp) + return resp + + class _UpdateExperiment(ExperimentsRestStub): + def __hash__(self): + return hash("UpdateExperiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_experiment.UpdateExperimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_experiment.Experiment: + r"""Call the update experiment method over HTTP. + + Args: + request (~.gcdc_experiment.UpdateExperimentRequest): + The request object. The request message for + [Experiments.UpdateExperiment][google.cloud.dialogflow.cx.v3.Experiments.UpdateExperiment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_experiment.Experiment: + Represents an experiment in an + environment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{experiment.name=projects/*/locations/*/agents/*/environments/*/experiments/*}", + "body": "experiment", + }, + ] + request, metadata = self._interceptor.pre_update_experiment( + request, metadata + ) + pb_request = gcdc_experiment.UpdateExperimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_experiment.Experiment() + pb_resp = gcdc_experiment.Experiment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_experiment(resp) + return resp + + @property + def create_experiment( + self, + ) -> Callable[ + [gcdc_experiment.CreateExperimentRequest], gcdc_experiment.Experiment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateExperiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_experiment( + self, + ) -> Callable[[experiment.DeleteExperimentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteExperiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_experiment( + self, + ) -> Callable[[experiment.GetExperimentRequest], experiment.Experiment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetExperiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_experiments( + self, + ) -> Callable[ + [experiment.ListExperimentsRequest], experiment.ListExperimentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListExperiments(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_experiment( + self, + ) -> Callable[[experiment.StartExperimentRequest], experiment.Experiment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartExperiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_experiment( + self, + ) -> Callable[[experiment.StopExperimentRequest], experiment.Experiment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopExperiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_experiment( + self, + ) -> Callable[ + [gcdc_experiment.UpdateExperimentRequest], gcdc_experiment.Experiment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateExperiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(ExperimentsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(ExperimentsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ExperimentsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ExperimentsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ExperimentsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ExperimentsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/__init__.py new file mode 100644 index 000000000000..6ec046f4e7fa --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import FlowsAsyncClient +from .client import FlowsClient + +__all__ = ( + "FlowsClient", + "FlowsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py new file mode 100644 index 000000000000..a7bddb9405a8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py @@ -0,0 +1,1664 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.flows import pagers +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow +from google.cloud.dialogflowcx_v3.types import page, validation_message + +from .client import FlowsClient +from .transports.base import DEFAULT_CLIENT_INFO, FlowsTransport +from .transports.grpc_asyncio import FlowsGrpcAsyncIOTransport + + +class FlowsAsyncClient: + """Service for managing [Flows][google.cloud.dialogflow.cx.v3.Flow].""" + + _client: FlowsClient + + DEFAULT_ENDPOINT = FlowsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FlowsClient.DEFAULT_MTLS_ENDPOINT + + flow_path = staticmethod(FlowsClient.flow_path) + parse_flow_path = 
staticmethod(FlowsClient.parse_flow_path) + flow_validation_result_path = staticmethod(FlowsClient.flow_validation_result_path) + parse_flow_validation_result_path = staticmethod( + FlowsClient.parse_flow_validation_result_path + ) + intent_path = staticmethod(FlowsClient.intent_path) + parse_intent_path = staticmethod(FlowsClient.parse_intent_path) + page_path = staticmethod(FlowsClient.page_path) + parse_page_path = staticmethod(FlowsClient.parse_page_path) + transition_route_group_path = staticmethod(FlowsClient.transition_route_group_path) + parse_transition_route_group_path = staticmethod( + FlowsClient.parse_transition_route_group_path + ) + webhook_path = staticmethod(FlowsClient.webhook_path) + parse_webhook_path = staticmethod(FlowsClient.parse_webhook_path) + common_billing_account_path = staticmethod(FlowsClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + FlowsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(FlowsClient.common_folder_path) + parse_common_folder_path = staticmethod(FlowsClient.parse_common_folder_path) + common_organization_path = staticmethod(FlowsClient.common_organization_path) + parse_common_organization_path = staticmethod( + FlowsClient.parse_common_organization_path + ) + common_project_path = staticmethod(FlowsClient.common_project_path) + parse_common_project_path = staticmethod(FlowsClient.parse_common_project_path) + common_location_path = staticmethod(FlowsClient.common_location_path) + parse_common_location_path = staticmethod(FlowsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + FlowsAsyncClient: The constructed client. + """ + return FlowsClient.from_service_account_info.__func__(FlowsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlowsAsyncClient: The constructed client. + """ + return FlowsClient.from_service_account_file.__func__(FlowsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FlowsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FlowsTransport: + """Returns the transport used by the client instance. + + Returns: + FlowsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(FlowsClient).get_transport_class, type(FlowsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, FlowsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the flows client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.FlowsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FlowsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_flow( + self, + request: Optional[Union[gcdc_flow.CreateFlowRequest, dict]] = None, + *, + parent: Optional[str] = None, + flow: Optional[gcdc_flow.Flow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_flow.Flow: + r"""Creates a flow in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + flow = dialogflowcx_v3.Flow() + flow.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateFlowRequest( + parent="parent_value", + flow=flow, + ) + + # Make the request + response = await client.create_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateFlowRequest, dict]]): + The request object. The request message for + [Flows.CreateFlow][google.cloud.dialogflow.cx.v3.Flows.CreateFlow]. + parent (:class:`str`): + Required. The agent to create a flow for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + flow (:class:`google.cloud.dialogflowcx_v3.types.Flow`): + Required. The flow to create. + This corresponds to the ``flow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). 
When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, flow]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_flow.CreateFlowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if flow is not None: + request.flow = flow + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_flow( + self, + request: Optional[Union[flow.DeleteFlowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteFlowRequest( + name="name_value", + ) + + # Make the request + await client.delete_flow(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteFlowRequest, dict]]): + The request object. The request message for + [Flows.DeleteFlow][google.cloud.dialogflow.cx.v3.Flows.DeleteFlow]. + name (:class:`str`): + Required. The name of the flow to delete. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = flow.DeleteFlowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_flows( + self, + request: Optional[Union[flow.ListFlowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListFlowsAsyncPager: + r"""Returns the list of all flows in the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_flows(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListFlowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_flows(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListFlowsRequest, dict]]): + The request object. The request message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + parent (:class:`str`): + Required. The agent containing the flows. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.flows.pagers.ListFlowsAsyncPager: + The response message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = flow.ListFlowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_flows, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListFlowsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_flow( + self, + request: Optional[Union[flow.GetFlowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.Flow: + r"""Retrieves the specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetFlowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetFlowRequest, dict]]): + The request object. The response message for + [Flows.GetFlow][google.cloud.dialogflow.cx.v3.Flows.GetFlow]. + name (:class:`str`): + Required. The name of the flow to get. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. 
However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = flow.GetFlowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_flow( + self, + request: Optional[Union[gcdc_flow.UpdateFlowRequest, dict]] = None, + *, + flow: Optional[gcdc_flow.Flow] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_flow.Flow: + r"""Updates the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + flow = dialogflowcx_v3.Flow() + flow.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateFlowRequest( + flow=flow, + ) + + # Make the request + response = await client.update_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateFlowRequest, dict]]): + The request object. The request message for + [Flows.UpdateFlow][google.cloud.dialogflow.cx.v3.Flows.UpdateFlow]. + flow (:class:`google.cloud.dialogflowcx_v3.types.Flow`): + Required. The flow to update. + This corresponds to the ``flow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Flow: + Flows represents the conversation + flows when you build your chatbot agent. 
+ A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([flow, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_flow.UpdateFlowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if flow is not None: + request.flow = flow + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("flow.name", request.flow.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def train_flow( + self, + request: Optional[Union[flow.TrainFlowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Trains the specified flow. Note that only the flow in 'draft' + environment is trained. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_train_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.TrainFlowRequest( + name="name_value", + ) + + # Make the request + operation = client.train_flow(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.TrainFlowRequest, dict]]): + The request object. The request message for + [Flows.TrainFlow][google.cloud.dialogflow.cx.v3.Flows.TrainFlow]. 
+ name (:class:`str`): + Required. The flow to train. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = flow.TrainFlowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.train_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def validate_flow( + self, + request: Optional[Union[flow.ValidateFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.FlowValidationResult: + r"""Validates the specified flow and creates or updates + validation results. Please call this API after the + training is completed to get the complete validation + results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_validate_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ValidateFlowRequest( + name="name_value", + ) + + # Make the request + response = await client.validate_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ValidateFlowRequest, dict]]): + The request object. The request message for + [Flows.ValidateFlow][google.cloud.dialogflow.cx.v3.Flows.ValidateFlow]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.FlowValidationResult: + The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + + """ + # Create or coerce a protobuf request object. + request = flow.ValidateFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.validate_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_flow_validation_result( + self, + request: Optional[Union[flow.GetFlowValidationResultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.FlowValidationResult: + r"""Gets the latest flow validation result. Flow + validation is performed when ValidateFlow is called. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_flow_validation_result(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetFlowValidationResultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_flow_validation_result(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetFlowValidationResultRequest, dict]]): + The request object. The request message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + name (:class:`str`): + Required. The flow name. Format: + ``projects//locations//agents//flows//validationResult``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.FlowValidationResult: + The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = flow.GetFlowValidationResultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_flow_validation_result, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def import_flow( + self, + request: Optional[Union[flow.ImportFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the specified flow to the specified agent from a binary + file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ImportFlowResponse][google.cloud.dialogflow.cx.v3.ImportFlowResponse] + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_import_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportFlowRequest( + flow_uri="flow_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_flow(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ImportFlowRequest, dict]]): + The request object. The request message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ImportFlowResponse` The response message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + + """ + # Create or coerce a protobuf request object. + request = flow.ImportFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + flow.ImportFlowResponse, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def export_flow( + self, + request: Optional[Union[flow.ExportFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports the specified flow to a binary file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportFlowResponse][google.cloud.dialogflow.cx.v3.ExportFlowResponse] + + Note that resources (e.g. intents, entities, webhooks) that the + flow references will also be exported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_export_flow(): + # Create a client + client = dialogflowcx_v3.FlowsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportFlowRequest( + name="name_value", + ) + + # Make the request + operation = client.export_flow(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ExportFlowRequest, dict]]): + The request object. The request message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportFlowResponse` The response message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. + + """ + # Create or coerce a protobuf request object. + request = flow.ExportFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_flow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + flow.ExportFlowResponse, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "FlowsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FlowsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py new file mode 100644 index 000000000000..91eb1e763c0e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py @@ -0,0 +1,2025 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.flows import pagers +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow +from google.cloud.dialogflowcx_v3.types import page, validation_message + +from .transports.base import DEFAULT_CLIENT_INFO, FlowsTransport +from .transports.grpc import FlowsGrpcTransport +from .transports.grpc_asyncio import FlowsGrpcAsyncIOTransport +from .transports.rest import FlowsRestTransport + + +class FlowsClientMeta(type): + 
"""Metaclass for the Flows client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[FlowsTransport]] + _transport_registry["grpc"] = FlowsGrpcTransport + _transport_registry["grpc_asyncio"] = FlowsGrpcAsyncIOTransport + _transport_registry["rest"] = FlowsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[FlowsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class FlowsClient(metaclass=FlowsClientMeta): + """Service for managing [Flows][google.cloud.dialogflow.cx.v3.Flow].""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlowsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlowsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FlowsTransport: + """Returns the transport used by the client instance. + + Returns: + FlowsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def flow_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_path(path: str) -> Dict[str, str]: + """Parses a flow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def flow_validation_result_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow_validation_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/validationResult".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_validation_result_path(path: str) -> Dict[str, str]: + """Parses a flow_validation_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/validationResult$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def intent_path( + project: str, + location: str, + agent: str, + intent: str, + ) -> str: + """Returns a fully-qualified intent string.""" + return "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format( + project=project, + location=location, + agent=agent, + intent=intent, + ) + + @staticmethod + def parse_intent_path(path: str) -> Dict[str, str]: + """Parses a intent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/intents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def page_path( + project: str, + location: str, + agent: str, + flow: str, + page: str, + ) -> 
str: + """Returns a fully-qualified page string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/pages/{page}".format( + project=project, + location=location, + agent=agent, + flow=flow, + page=page, + ) + + @staticmethod + def parse_page_path(path: str) -> Dict[str, str]: + """Parses a page path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/pages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transition_route_group_path( + project: str, + location: str, + agent: str, + flow: str, + transition_route_group: str, + ) -> str: + """Returns a fully-qualified transition_route_group string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( + project=project, + location=location, + agent=agent, + flow=flow, + transition_route_group=transition_route_group, + ) + + @staticmethod + def parse_transition_route_group_path(path: str) -> Dict[str, str]: + """Parses a transition_route_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/transitionRouteGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, 
+ ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location 
path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, FlowsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the flows client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, FlowsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, FlowsTransport): + # transport is a FlowsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_flow( + self, + request: Optional[Union[gcdc_flow.CreateFlowRequest, dict]] = None, + *, + parent: Optional[str] = None, + flow: Optional[gcdc_flow.Flow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_flow.Flow: + r"""Creates a flow in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + flow = dialogflowcx_v3.Flow() + flow.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateFlowRequest( + parent="parent_value", + flow=flow, + ) + + # Make the request + response = client.create_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateFlowRequest, dict]): + The request object. The request message for + [Flows.CreateFlow][google.cloud.dialogflow.cx.v3.Flows.CreateFlow]. + parent (str): + Required. The agent to create a flow for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + flow (google.cloud.dialogflowcx_v3.types.Flow): + Required. The flow to create. + This corresponds to the ``flow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). 
When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, flow]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_flow.CreateFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_flow.CreateFlowRequest): + request = gcdc_flow.CreateFlowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if flow is not None: + request.flow = flow + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_flow( + self, + request: Optional[Union[flow.DeleteFlowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteFlowRequest( + name="name_value", + ) + + # Make the request + client.delete_flow(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteFlowRequest, dict]): + The request object. The request message for + [Flows.DeleteFlow][google.cloud.dialogflow.cx.v3.Flows.DeleteFlow]. + name (str): + Required. The name of the flow to delete. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a flow.DeleteFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.DeleteFlowRequest): + request = flow.DeleteFlowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_flows( + self, + request: Optional[Union[flow.ListFlowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListFlowsPager: + r"""Returns the list of all flows in the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_flows(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListFlowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_flows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListFlowsRequest, dict]): + The request object. The request message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + parent (str): + Required. The agent containing the flows. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.flows.pagers.ListFlowsPager: + The response message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a flow.ListFlowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.ListFlowsRequest): + request = flow.ListFlowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_flows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFlowsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_flow( + self, + request: Optional[Union[flow.GetFlowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.Flow: + r"""Retrieves the specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetFlowRequest( + name="name_value", + ) + + # Make the request + response = client.get_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetFlowRequest, dict]): + The request object. The response message for + [Flows.GetFlow][google.cloud.dialogflow.cx.v3.Flows.GetFlow]. + name (str): + Required. The name of the flow to get. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. 
However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a flow.GetFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.GetFlowRequest): + request = flow.GetFlowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_flow( + self, + request: Optional[Union[gcdc_flow.UpdateFlowRequest, dict]] = None, + *, + flow: Optional[gcdc_flow.Flow] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_flow.Flow: + r"""Updates the specified flow. 
+ + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + flow = dialogflowcx_v3.Flow() + flow.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateFlowRequest( + flow=flow, + ) + + # Make the request + response = client.update_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateFlowRequest, dict]): + The request object. The request message for + [Flows.UpdateFlow][google.cloud.dialogflow.cx.v3.Flows.UpdateFlow]. + flow (google.cloud.dialogflowcx_v3.types.Flow): + Required. The flow to update. + This corresponds to the ``flow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([flow, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_flow.UpdateFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_flow.UpdateFlowRequest): + request = gcdc_flow.UpdateFlowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if flow is not None: + request.flow = flow + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("flow.name", request.flow.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def train_flow( + self, + request: Optional[Union[flow.TrainFlowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Trains the specified flow. Note that only the flow in 'draft' + environment is trained. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_train_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.TrainFlowRequest( + name="name_value", + ) + + # Make the request + operation = client.train_flow(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.TrainFlowRequest, dict]): + The request object. The request message for + [Flows.TrainFlow][google.cloud.dialogflow.cx.v3.Flows.TrainFlow]. + name (str): + Required. The flow to train. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a flow.TrainFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.TrainFlowRequest): + request = flow.TrainFlowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.train_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + def validate_flow( + self, + request: Optional[Union[flow.ValidateFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.FlowValidationResult: + r"""Validates the specified flow and creates or updates + validation results. Please call this API after the + training is completed to get the complete validation + results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_validate_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ValidateFlowRequest( + name="name_value", + ) + + # Make the request + response = client.validate_flow(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ValidateFlowRequest, dict]): + The request object. The request message for + [Flows.ValidateFlow][google.cloud.dialogflow.cx.v3.Flows.ValidateFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.FlowValidationResult: + The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a flow.ValidateFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.ValidateFlowRequest): + request = flow.ValidateFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_flow] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_flow_validation_result( + self, + request: Optional[Union[flow.GetFlowValidationResultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.FlowValidationResult: + r"""Gets the latest flow validation result. Flow + validation is performed when ValidateFlow is called. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_flow_validation_result(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetFlowValidationResultRequest( + name="name_value", + ) + + # Make the request + response = client.get_flow_validation_result(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetFlowValidationResultRequest, dict]): + The request object. The request message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + name (str): + Required. The flow name. Format: + ``projects//locations//agents//flows//validationResult``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.FlowValidationResult: + The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a flow.GetFlowValidationResultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.GetFlowValidationResultRequest): + request = flow.GetFlowValidationResultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_flow_validation_result + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def import_flow( + self, + request: Optional[Union[flow.ImportFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the specified flow to the specified agent from a binary + file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ImportFlowResponse][google.cloud.dialogflow.cx.v3.ImportFlowResponse] + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_import_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportFlowRequest( + flow_uri="flow_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_flow(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ImportFlowRequest, dict]): + The request object. The request message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ImportFlowResponse` The response message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a flow.ImportFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.ImportFlowRequest): + request = flow.ImportFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + flow.ImportFlowResponse, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + def export_flow( + self, + request: Optional[Union[flow.ExportFlowRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports the specified flow to a binary file. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportFlowResponse][google.cloud.dialogflow.cx.v3.ExportFlowResponse] + + Note that resources (e.g. intents, entities, webhooks) that the + flow references will also be exported. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_export_flow(): + # Create a client + client = dialogflowcx_v3.FlowsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportFlowRequest( + name="name_value", + ) + + # Make the request + operation = client.export_flow(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ExportFlowRequest, dict]): + The request object. The request message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportFlowResponse` The response message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. 
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a flow.ExportFlowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, flow.ExportFlowRequest): + request = flow.ExportFlowRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_flow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + flow.ExportFlowResponse, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "FlowsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FlowsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/pagers.py new file mode 100644 index 000000000000..38a990c16dff --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import flow + + +class ListFlowsPager: + """A pager for iterating through ``list_flows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListFlowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``flows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFlows`` requests and continue to iterate + through the ``flows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListFlowsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., flow.ListFlowsResponse], + request: flow.ListFlowsRequest, + response: flow.ListFlowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListFlowsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListFlowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = flow.ListFlowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[flow.ListFlowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[flow.Flow]: + for page in self.pages: + yield from page.flows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFlowsAsyncPager: + """A pager for iterating through ``list_flows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListFlowsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``flows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFlows`` requests and continue to iterate + through the ``flows`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListFlowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[flow.ListFlowsResponse]], + request: flow.ListFlowsRequest, + response: flow.ListFlowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListFlowsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListFlowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = flow.ListFlowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[flow.ListFlowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[flow.Flow]: + async def async_generator(): + async for page in self.pages: + for response in page.flows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/__init__.py new file mode 100644 index 000000000000..3b54eaf837a4 --- /dev/null +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FlowsTransport +from .grpc import FlowsGrpcTransport +from .grpc_asyncio import FlowsGrpcAsyncIOTransport +from .rest import FlowsRestInterceptor, FlowsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[FlowsTransport]] +_transport_registry["grpc"] = FlowsGrpcTransport +_transport_registry["grpc_asyncio"] = FlowsGrpcAsyncIOTransport +_transport_registry["rest"] = FlowsRestTransport + +__all__ = ( + "FlowsTransport", + "FlowsGrpcTransport", + "FlowsGrpcAsyncIOTransport", + "FlowsRestTransport", + "FlowsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/base.py new file mode 100644 index 000000000000..e757937b4957 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/base.py @@ -0,0 +1,335 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class FlowsTransport(abc.ABC): + """Abstract transport class for Flows.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_flow: gapic_v1.method.wrap_method( + self.create_flow, + default_timeout=None, + client_info=client_info, + ), + self.delete_flow: gapic_v1.method.wrap_method( + self.delete_flow, + default_timeout=None, + client_info=client_info, + ), + self.list_flows: gapic_v1.method.wrap_method( + self.list_flows, + default_timeout=None, + client_info=client_info, + ), + self.get_flow: gapic_v1.method.wrap_method( + self.get_flow, + default_timeout=None, + client_info=client_info, + ), + self.update_flow: gapic_v1.method.wrap_method( + self.update_flow, + default_timeout=None, + client_info=client_info, + ), + self.train_flow: gapic_v1.method.wrap_method( + self.train_flow, + default_timeout=None, + client_info=client_info, + ), + self.validate_flow: gapic_v1.method.wrap_method( + self.validate_flow, + default_timeout=None, + client_info=client_info, + ), + self.get_flow_validation_result: gapic_v1.method.wrap_method( + self.get_flow_validation_result, + default_timeout=None, + client_info=client_info, + ), + self.import_flow: gapic_v1.method.wrap_method( + self.import_flow, + default_timeout=None, + client_info=client_info, + ), + self.export_flow: gapic_v1.method.wrap_method( + self.export_flow, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_flow( + self, + ) -> Callable[ + [gcdc_flow.CreateFlowRequest], Union[gcdc_flow.Flow, Awaitable[gcdc_flow.Flow]] + ]: + raise NotImplementedError() + + @property + def delete_flow( + self, + ) -> Callable[ + [flow.DeleteFlowRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def list_flows( + self, + ) -> Callable[ + [flow.ListFlowsRequest], + Union[flow.ListFlowsResponse, Awaitable[flow.ListFlowsResponse]], + ]: + raise NotImplementedError() + + @property + def get_flow( + self, + ) -> Callable[[flow.GetFlowRequest], Union[flow.Flow, Awaitable[flow.Flow]]]: + raise NotImplementedError() + + @property + def update_flow( + self, + ) -> Callable[ + [gcdc_flow.UpdateFlowRequest], Union[gcdc_flow.Flow, Awaitable[gcdc_flow.Flow]] + ]: + raise NotImplementedError() + + @property + def train_flow( + self, + ) -> Callable[ + [flow.TrainFlowRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def validate_flow( + self, + ) -> Callable[ + [flow.ValidateFlowRequest], + Union[flow.FlowValidationResult, Awaitable[flow.FlowValidationResult]], + ]: + raise NotImplementedError() + + @property + def get_flow_validation_result( + self, + ) -> Callable[ + [flow.GetFlowValidationResultRequest], + Union[flow.FlowValidationResult, Awaitable[flow.FlowValidationResult]], + ]: + raise NotImplementedError() + + @property + def import_flow( + self, + ) -> Callable[ + [flow.ImportFlowRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_flow( + self, + ) -> Callable[ + [flow.ExportFlowRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("FlowsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/grpc.py new file mode 100644 index 000000000000..2c2d046abc16 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/grpc.py @@ -0,0 +1,650 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow + +from .base import DEFAULT_CLIENT_INFO, FlowsTransport + + +class FlowsGrpcTransport(FlowsTransport): + """gRPC backend transport for Flows. + + Service for managing [Flows][google.cloud.dialogflow.cx.v3.Flow]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_flow(self) -> Callable[[gcdc_flow.CreateFlowRequest], gcdc_flow.Flow]: + r"""Return a callable for the create flow method over gRPC. + + Creates a flow in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateFlowRequest], + ~.Flow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_flow" not in self._stubs: + self._stubs["create_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/CreateFlow", + request_serializer=gcdc_flow.CreateFlowRequest.serialize, + response_deserializer=gcdc_flow.Flow.deserialize, + ) + return self._stubs["create_flow"] + + @property + def delete_flow(self) -> Callable[[flow.DeleteFlowRequest], empty_pb2.Empty]: + r"""Return a callable for the delete flow method over gRPC. + + Deletes a specified flow. + + Returns: + Callable[[~.DeleteFlowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_flow" not in self._stubs: + self._stubs["delete_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/DeleteFlow", + request_serializer=flow.DeleteFlowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_flow"] + + @property + def list_flows(self) -> Callable[[flow.ListFlowsRequest], flow.ListFlowsResponse]: + r"""Return a callable for the list flows method over gRPC. + + Returns the list of all flows in the specified agent. + + Returns: + Callable[[~.ListFlowsRequest], + ~.ListFlowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_flows" not in self._stubs: + self._stubs["list_flows"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ListFlows", + request_serializer=flow.ListFlowsRequest.serialize, + response_deserializer=flow.ListFlowsResponse.deserialize, + ) + return self._stubs["list_flows"] + + @property + def get_flow(self) -> Callable[[flow.GetFlowRequest], flow.Flow]: + r"""Return a callable for the get flow method over gRPC. + + Retrieves the specified flow. + + Returns: + Callable[[~.GetFlowRequest], + ~.Flow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_flow" not in self._stubs: + self._stubs["get_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/GetFlow", + request_serializer=flow.GetFlowRequest.serialize, + response_deserializer=flow.Flow.deserialize, + ) + return self._stubs["get_flow"] + + @property + def update_flow(self) -> Callable[[gcdc_flow.UpdateFlowRequest], gcdc_flow.Flow]: + r"""Return a callable for the update flow method over gRPC. + + Updates the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateFlowRequest], + ~.Flow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_flow" not in self._stubs: + self._stubs["update_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/UpdateFlow", + request_serializer=gcdc_flow.UpdateFlowRequest.serialize, + response_deserializer=gcdc_flow.Flow.deserialize, + ) + return self._stubs["update_flow"] + + @property + def train_flow(self) -> Callable[[flow.TrainFlowRequest], operations_pb2.Operation]: + r"""Return a callable for the train flow method over gRPC. + + Trains the specified flow. Note that only the flow in 'draft' + environment is trained. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.TrainFlowRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "train_flow" not in self._stubs: + self._stubs["train_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/TrainFlow", + request_serializer=flow.TrainFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["train_flow"] + + @property + def validate_flow( + self, + ) -> Callable[[flow.ValidateFlowRequest], flow.FlowValidationResult]: + r"""Return a callable for the validate flow method over gRPC. + + Validates the specified flow and creates or updates + validation results. Please call this API after the + training is completed to get the complete validation + results. 
+ + Returns: + Callable[[~.ValidateFlowRequest], + ~.FlowValidationResult]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_flow" not in self._stubs: + self._stubs["validate_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ValidateFlow", + request_serializer=flow.ValidateFlowRequest.serialize, + response_deserializer=flow.FlowValidationResult.deserialize, + ) + return self._stubs["validate_flow"] + + @property + def get_flow_validation_result( + self, + ) -> Callable[[flow.GetFlowValidationResultRequest], flow.FlowValidationResult]: + r"""Return a callable for the get flow validation result method over gRPC. + + Gets the latest flow validation result. Flow + validation is performed when ValidateFlow is called. + + Returns: + Callable[[~.GetFlowValidationResultRequest], + ~.FlowValidationResult]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_flow_validation_result" not in self._stubs: + self._stubs["get_flow_validation_result"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/GetFlowValidationResult", + request_serializer=flow.GetFlowValidationResultRequest.serialize, + response_deserializer=flow.FlowValidationResult.deserialize, + ) + return self._stubs["get_flow_validation_result"] + + @property + def import_flow( + self, + ) -> Callable[[flow.ImportFlowRequest], operations_pb2.Operation]: + r"""Return a callable for the import flow method over gRPC. + + Imports the specified flow to the specified agent from a binary + file. 
+ + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ImportFlowResponse][google.cloud.dialogflow.cx.v3.ImportFlowResponse] + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.ImportFlowRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_flow" not in self._stubs: + self._stubs["import_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ImportFlow", + request_serializer=flow.ImportFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_flow"] + + @property + def export_flow( + self, + ) -> Callable[[flow.ExportFlowRequest], operations_pb2.Operation]: + r"""Return a callable for the export flow method over gRPC. + + Exports the specified flow to a binary file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportFlowResponse][google.cloud.dialogflow.cx.v3.ExportFlowResponse] + + Note that resources (e.g. intents, entities, webhooks) that the + flow references will also be exported. + + Returns: + Callable[[~.ExportFlowRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_flow" not in self._stubs: + self._stubs["export_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ExportFlow", + request_serializer=flow.ExportFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_flow"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("FlowsGrpcTransport",)
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..6130ebf0ad55
--- /dev/null
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/grpc_asyncio.py
@@ -0,0 +1,663 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow + +from .base import DEFAULT_CLIENT_INFO, FlowsTransport +from .grpc import FlowsGrpcTransport + + +class FlowsGrpcAsyncIOTransport(FlowsTransport): + """gRPC AsyncIO backend transport for Flows. + + Service for managing [Flows][google.cloud.dialogflow.cx.v3.Flow]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_flow( + self, + ) -> Callable[[gcdc_flow.CreateFlowRequest], Awaitable[gcdc_flow.Flow]]: + r"""Return a callable for the create flow method over gRPC. + + Creates a flow in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateFlowRequest], + Awaitable[~.Flow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_flow" not in self._stubs: + self._stubs["create_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/CreateFlow", + request_serializer=gcdc_flow.CreateFlowRequest.serialize, + response_deserializer=gcdc_flow.Flow.deserialize, + ) + return self._stubs["create_flow"] + + @property + def delete_flow( + self, + ) -> Callable[[flow.DeleteFlowRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete flow method over gRPC. + + Deletes a specified flow. + + Returns: + Callable[[~.DeleteFlowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_flow" not in self._stubs: + self._stubs["delete_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/DeleteFlow", + request_serializer=flow.DeleteFlowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_flow"] + + @property + def list_flows( + self, + ) -> Callable[[flow.ListFlowsRequest], Awaitable[flow.ListFlowsResponse]]: + r"""Return a callable for the list flows method over gRPC. + + Returns the list of all flows in the specified agent. + + Returns: + Callable[[~.ListFlowsRequest], + Awaitable[~.ListFlowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_flows" not in self._stubs: + self._stubs["list_flows"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ListFlows", + request_serializer=flow.ListFlowsRequest.serialize, + response_deserializer=flow.ListFlowsResponse.deserialize, + ) + return self._stubs["list_flows"] + + @property + def get_flow(self) -> Callable[[flow.GetFlowRequest], Awaitable[flow.Flow]]: + r"""Return a callable for the get flow method over gRPC. + + Retrieves the specified flow. + + Returns: + Callable[[~.GetFlowRequest], + Awaitable[~.Flow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_flow" not in self._stubs: + self._stubs["get_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/GetFlow", + request_serializer=flow.GetFlowRequest.serialize, + response_deserializer=flow.Flow.deserialize, + ) + return self._stubs["get_flow"] + + @property + def update_flow( + self, + ) -> Callable[[gcdc_flow.UpdateFlowRequest], Awaitable[gcdc_flow.Flow]]: + r"""Return a callable for the update flow method over gRPC. + + Updates the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateFlowRequest], + Awaitable[~.Flow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_flow" not in self._stubs: + self._stubs["update_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/UpdateFlow", + request_serializer=gcdc_flow.UpdateFlowRequest.serialize, + response_deserializer=gcdc_flow.Flow.deserialize, + ) + return self._stubs["update_flow"] + + @property + def train_flow( + self, + ) -> Callable[[flow.TrainFlowRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the train flow method over gRPC. + + Trains the specified flow. Note that only the flow in 'draft' + environment is trained. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.TrainFlowRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "train_flow" not in self._stubs: + self._stubs["train_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/TrainFlow", + request_serializer=flow.TrainFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["train_flow"] + + @property + def validate_flow( + self, + ) -> Callable[[flow.ValidateFlowRequest], Awaitable[flow.FlowValidationResult]]: + r"""Return a callable for the validate flow method over gRPC. + + Validates the specified flow and creates or updates + validation results. Please call this API after the + training is completed to get the complete validation + results. 
+ + Returns: + Callable[[~.ValidateFlowRequest], + Awaitable[~.FlowValidationResult]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_flow" not in self._stubs: + self._stubs["validate_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ValidateFlow", + request_serializer=flow.ValidateFlowRequest.serialize, + response_deserializer=flow.FlowValidationResult.deserialize, + ) + return self._stubs["validate_flow"] + + @property + def get_flow_validation_result( + self, + ) -> Callable[ + [flow.GetFlowValidationResultRequest], Awaitable[flow.FlowValidationResult] + ]: + r"""Return a callable for the get flow validation result method over gRPC. + + Gets the latest flow validation result. Flow + validation is performed when ValidateFlow is called. + + Returns: + Callable[[~.GetFlowValidationResultRequest], + Awaitable[~.FlowValidationResult]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_flow_validation_result" not in self._stubs: + self._stubs["get_flow_validation_result"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/GetFlowValidationResult", + request_serializer=flow.GetFlowValidationResultRequest.serialize, + response_deserializer=flow.FlowValidationResult.deserialize, + ) + return self._stubs["get_flow_validation_result"] + + @property + def import_flow( + self, + ) -> Callable[[flow.ImportFlowRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import flow method over gRPC. 
+ + Imports the specified flow to the specified agent from a binary + file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ImportFlowResponse][google.cloud.dialogflow.cx.v3.ImportFlowResponse] + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.ImportFlowRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_flow" not in self._stubs: + self._stubs["import_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ImportFlow", + request_serializer=flow.ImportFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_flow"] + + @property + def export_flow( + self, + ) -> Callable[[flow.ExportFlowRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export flow method over gRPC. + + Exports the specified flow to a binary file. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: + [ExportFlowResponse][google.cloud.dialogflow.cx.v3.ExportFlowResponse] + + Note that resources (e.g. intents, entities, webhooks) that the + flow references will also be exported. + + Returns: + Callable[[~.ExportFlowRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_flow" not in self._stubs: + self._stubs["export_flow"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Flows/ExportFlow", + request_serializer=flow.ExportFlowRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_flow"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = ("FlowsGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py
new file mode 100644
index 000000000000..11ef1f663ae7
--- /dev/null
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py
@@ -0,0 +1,2035 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import FlowsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FlowsRestInterceptor: + """Interceptor for Flows. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FlowsRestTransport. + + .. code-block:: python + class MyCustomFlowsInterceptor(FlowsRestInterceptor): + def pre_create_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_export_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_flow_validation_result(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_flow_validation_result(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_flows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_flows(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_train_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_train_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_flow(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_validate_flow(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_validate_flow(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FlowsRestTransport(interceptor=MyCustomFlowsInterceptor()) + client = FlowsClient(transport=transport) + + + """ + + def pre_create_flow( + self, request: gcdc_flow.CreateFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gcdc_flow.CreateFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_create_flow(self, response: gcdc_flow.Flow) -> gcdc_flow.Flow: + """Post-rpc interceptor for create_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_delete_flow( + self, request: flow.DeleteFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.DeleteFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. 
+ """ + return request, metadata + + def pre_export_flow( + self, request: flow.ExportFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.ExportFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_export_flow( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_get_flow( + self, request: flow.GetFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.GetFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_get_flow(self, response: flow.Flow) -> flow.Flow: + """Post-rpc interceptor for get_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_get_flow_validation_result( + self, + request: flow.GetFlowValidationResultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[flow.GetFlowValidationResultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_flow_validation_result + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. 
+ """ + return request, metadata + + def post_get_flow_validation_result( + self, response: flow.FlowValidationResult + ) -> flow.FlowValidationResult: + """Post-rpc interceptor for get_flow_validation_result + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_import_flow( + self, request: flow.ImportFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.ImportFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_import_flow( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_list_flows( + self, request: flow.ListFlowsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.ListFlowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_flows + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_list_flows( + self, response: flow.ListFlowsResponse + ) -> flow.ListFlowsResponse: + """Post-rpc interceptor for list_flows + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_train_flow( + self, request: flow.TrainFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.TrainFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for train_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. 
+ """ + return request, metadata + + def post_train_flow( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for train_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_update_flow( + self, request: gcdc_flow.UpdateFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gcdc_flow.UpdateFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_update_flow(self, response: gcdc_flow.Flow) -> gcdc_flow.Flow: + """Post-rpc interceptor for update_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_validate_flow( + self, request: flow.ValidateFlowRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[flow.ValidateFlowRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for validate_flow + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_validate_flow( + self, response: flow.FlowValidationResult + ) -> flow.FlowValidationResult: + """Post-rpc interceptor for validate_flow + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Flows server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Flows server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FlowsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FlowsRestInterceptor + + +class FlowsRestTransport(FlowsTransport): + """REST backend transport for Flows. + + Service for managing [Flows][google.cloud.dialogflow.cx.v3.Flow]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FlowsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FlowsRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateFlow(FlowsRestStub): + def __hash__(self): + return hash("CreateFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_flow.CreateFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_flow.Flow: + r"""Call the create flow method over HTTP. + + Args: + request (~.gcdc_flow.CreateFlowRequest): + The request object. The request message for + [Flows.CreateFlow][google.cloud.dialogflow.cx.v3.Flows.CreateFlow]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_flow.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/flows", + "body": "flow", + }, + ] + request, metadata = self._interceptor.pre_create_flow(request, metadata) + pb_request = gcdc_flow.CreateFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_flow.Flow() + pb_resp = gcdc_flow.Flow.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_flow(resp) + return resp + + class _DeleteFlow(FlowsRestStub): + def __hash__(self): + return hash("DeleteFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.DeleteFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete flow method over HTTP. + + Args: + request (~.flow.DeleteFlowRequest): + The request object. The request message for + [Flows.DeleteFlow][google.cloud.dialogflow.cx.v3.Flows.DeleteFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_flow(request, metadata) + pb_request = flow.DeleteFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExportFlow(FlowsRestStub): + def __hash__(self): + return hash("ExportFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.ExportFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export flow method over HTTP. + + Args: + request (~.flow.ExportFlowRequest): + The request object. 
The request message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_flow(request, metadata) + pb_request = flow.ExportFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_flow(resp) + return resp + + class _GetFlow(FlowsRestStub): + def __hash__(self): + return hash("GetFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.GetFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.Flow: + r"""Call the get flow method over HTTP. + + Args: + request (~.flow.GetFlowRequest): + The request object. The response message for + [Flows.GetFlow][google.cloud.dialogflow.cx.v3.Flows.GetFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.flow.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. 
However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*}", + }, + ] + request, metadata = self._interceptor.pre_get_flow(request, metadata) + pb_request = flow.GetFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = flow.Flow() + pb_resp = flow.Flow.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_flow(resp) + return resp + + class _GetFlowValidationResult(FlowsRestStub): + def __hash__(self): + return hash("GetFlowValidationResult") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.GetFlowValidationResultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.FlowValidationResult: + r"""Call the get flow validation + result method over HTTP. + + Args: + request (~.flow.GetFlowValidationResultRequest): + The request object. The request message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.flow.FlowValidationResult: + The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/validationResult}", + }, + ] + request, metadata = self._interceptor.pre_get_flow_validation_result( + request, metadata + ) + pb_request = flow.GetFlowValidationResultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = flow.FlowValidationResult() + pb_resp = flow.FlowValidationResult.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_flow_validation_result(resp) + return resp + + class _ImportFlow(FlowsRestStub): + def __hash__(self): + return hash("ImportFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.ImportFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import flow method over HTTP. + + Args: + request (~.flow.ImportFlowRequest): + The request object. The request message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/flows:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_flow(request, metadata) + pb_request = flow.ImportFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_flow(resp) + return resp + + class _ListFlows(FlowsRestStub): + def __hash__(self): + return hash("ListFlows") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.ListFlowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.ListFlowsResponse: + r"""Call the list flows method over HTTP. + + Args: + request (~.flow.ListFlowsRequest): + The request object. The request message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.flow.ListFlowsResponse: + The response message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/flows", + }, + ] + request, metadata = self._interceptor.pre_list_flows(request, metadata) + pb_request = flow.ListFlowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = flow.ListFlowsResponse() + pb_resp = flow.ListFlowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_flows(resp) + return resp + + class _TrainFlow(FlowsRestStub): + def __hash__(self): + return hash("TrainFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.TrainFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the train flow method over HTTP. + + Args: + request (~.flow.TrainFlowRequest): + The request object. The request message for + [Flows.TrainFlow][google.cloud.dialogflow.cx.v3.Flows.TrainFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*}:train", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_train_flow(request, metadata) + pb_request = flow.TrainFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_train_flow(resp) + return resp + + class _UpdateFlow(FlowsRestStub): + def __hash__(self): + return hash("UpdateFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_flow.UpdateFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_flow.Flow: + r"""Call the update flow method over HTTP. + + Args: + request (~.gcdc_flow.UpdateFlowRequest): + The request object. The request message for + [Flows.UpdateFlow][google.cloud.dialogflow.cx.v3.Flows.UpdateFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_flow.Flow: + Flows represents the conversation + flows when you build your chatbot agent. + A flow consists of many pages connected + by the transition routes. Conversations + always start with the built-in Start + Flow (with an all-0 ID). Transition + routes can direct the conversation + session from the current flow (parent + flow) to another flow (sub flow). When + the sub flow is finished, Dialogflow + will bring the session back to the + parent flow, where the sub flow is + started. + + Usually, when a transition route is + followed by a matched intent, the intent + will be "consumed". This means the + intent won't activate more transition + routes. 
However, when the followed + transition route moves the conversation + session into a different flow, the + matched intent can be carried over and + to be consumed in the target flow. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{flow.name=projects/*/locations/*/agents/*/flows/*}", + "body": "flow", + }, + ] + request, metadata = self._interceptor.pre_update_flow(request, metadata) + pb_request = gcdc_flow.UpdateFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_flow.Flow() + pb_resp = gcdc_flow.Flow.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_flow(resp) + return resp + + class _ValidateFlow(FlowsRestStub): + def __hash__(self): + return hash("ValidateFlow") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: flow.ValidateFlowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> flow.FlowValidationResult: + r"""Call the validate flow method over HTTP. + + Args: + request (~.flow.ValidateFlowRequest): + The request object. The request message for + [Flows.ValidateFlow][google.cloud.dialogflow.cx.v3.Flows.ValidateFlow]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.flow.FlowValidationResult: + The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*}:validate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_validate_flow(request, metadata) + pb_request = flow.ValidateFlowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = flow.FlowValidationResult() + pb_resp = flow.FlowValidationResult.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_flow(resp) + return resp + + @property + def create_flow(self) -> Callable[[gcdc_flow.CreateFlowRequest], gcdc_flow.Flow]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_flow(self) -> Callable[[flow.DeleteFlowRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_flow( + self, + ) -> Callable[[flow.ExportFlowRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_flow(self) -> Callable[[flow.GetFlowRequest], flow.Flow]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_flow_validation_result( + self, + ) -> Callable[[flow.GetFlowValidationResultRequest], flow.FlowValidationResult]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFlowValidationResult(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_flow( + self, + ) -> Callable[[flow.ImportFlowRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_flows(self) -> Callable[[flow.ListFlowsRequest], flow.ListFlowsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFlows(self._session, self._host, self._interceptor) # type: ignore + + @property + def train_flow(self) -> Callable[[flow.TrainFlowRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TrainFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_flow(self) -> Callable[[gcdc_flow.UpdateFlowRequest], gcdc_flow.Flow]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_flow( + self, + ) -> Callable[[flow.ValidateFlowRequest], flow.FlowValidationResult]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ValidateFlow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(FlowsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(FlowsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(FlowsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(FlowsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(FlowsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FlowsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/__init__.py new file mode 100644 index 000000000000..8305b8681233 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import IntentsAsyncClient +from .client import IntentsClient + +__all__ = ( + "IntentsClient", + "IntentsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py new file mode 100644 index 000000000000..b68cb35976f9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py @@ -0,0 +1,1065 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.intents import pagers +from google.cloud.dialogflowcx_v3.types import intent +from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent + +from .client import IntentsClient +from .transports.base import DEFAULT_CLIENT_INFO, IntentsTransport +from .transports.grpc_asyncio import IntentsGrpcAsyncIOTransport + + +class IntentsAsyncClient: + """Service for managing + [Intents][google.cloud.dialogflow.cx.v3.Intent]. 
+ """ + + _client: IntentsClient + + DEFAULT_ENDPOINT = IntentsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = IntentsClient.DEFAULT_MTLS_ENDPOINT + + entity_type_path = staticmethod(IntentsClient.entity_type_path) + parse_entity_type_path = staticmethod(IntentsClient.parse_entity_type_path) + intent_path = staticmethod(IntentsClient.intent_path) + parse_intent_path = staticmethod(IntentsClient.parse_intent_path) + common_billing_account_path = staticmethod( + IntentsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + IntentsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(IntentsClient.common_folder_path) + parse_common_folder_path = staticmethod(IntentsClient.parse_common_folder_path) + common_organization_path = staticmethod(IntentsClient.common_organization_path) + parse_common_organization_path = staticmethod( + IntentsClient.parse_common_organization_path + ) + common_project_path = staticmethod(IntentsClient.common_project_path) + parse_common_project_path = staticmethod(IntentsClient.parse_common_project_path) + common_location_path = staticmethod(IntentsClient.common_location_path) + parse_common_location_path = staticmethod(IntentsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IntentsAsyncClient: The constructed client. + """ + return IntentsClient.from_service_account_info.__func__(IntentsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IntentsAsyncClient: The constructed client. + """ + return IntentsClient.from_service_account_file.__func__(IntentsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return IntentsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> IntentsTransport: + """Returns the transport used by the client instance. + + Returns: + IntentsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(IntentsClient).get_transport_class, type(IntentsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, IntentsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the intents client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.IntentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = IntentsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_intents( + self, + request: Optional[Union[intent.ListIntentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIntentsAsyncPager: + r"""Returns the list of all intents in the specified + agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_intents(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListIntentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListIntentsRequest, dict]]): + The request object. The request message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + parent (:class:`str`): + Required. The agent to list all intents for. 
Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.intents.pagers.ListIntentsAsyncPager: + The response message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = intent.ListIntentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_intents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListIntentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_intent( + self, + request: Optional[Union[intent.GetIntentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> intent.Intent: + r"""Retrieves the specified intent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_intent(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetIntentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetIntentRequest, dict]]): + The request object. The request message for + [Intents.GetIntent][google.cloud.dialogflow.cx.v3.Intents.GetIntent]. + name (:class:`str`): + Required. The name of the intent. Format: + ``projects//locations//agents//intents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = intent.GetIntentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_intent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_intent( + self, + request: Optional[Union[gcdc_intent.CreateIntentRequest, dict]] = None, + *, + parent: Optional[str] = None, + intent: Optional[gcdc_intent.Intent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_intent.Intent: + r"""Creates an intent in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_intent(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + intent = dialogflowcx_v3.Intent() + intent.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateIntentRequest( + parent="parent_value", + intent=intent, + ) + + # Make the request + response = await client.create_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateIntentRequest, dict]]): + The request object. The request message for + [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. + parent (:class:`str`): + Required. The agent to create an intent for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ intent (:class:`google.cloud.dialogflowcx_v3.types.Intent`): + Required. The intent to create. + This corresponds to the ``intent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, intent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_intent.CreateIntentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intent is not None: + request.intent = intent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_intent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_intent( + self, + request: Optional[Union[gcdc_intent.UpdateIntentRequest, dict]] = None, + *, + intent: Optional[gcdc_intent.Intent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_intent.Intent: + r"""Updates the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_intent(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + intent = dialogflowcx_v3.Intent() + intent.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateIntentRequest( + intent=intent, + ) + + # Make the request + response = await client.update_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateIntentRequest, dict]]): + The request object. The request message for + [Intents.UpdateIntent][google.cloud.dialogflow.cx.v3.Intents.UpdateIntent]. + intent (:class:`google.cloud.dialogflowcx_v3.types.Intent`): + Required. The intent to update. 
+ This corresponds to the ``intent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([intent, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_intent.UpdateIntentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intent is not None: + request.intent = intent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_intent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("intent.name", request.intent.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_intent( + self, + request: Optional[Union[intent.DeleteIntentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_intent(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteIntentRequest( + name="name_value", + ) + + # Make the request + await client.delete_intent(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteIntentRequest, dict]]): + The request object. The request message for + [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent]. + name (:class:`str`): + Required. 
The name of the intent to delete. Format: + ``projects//locations//agents//intents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = intent.DeleteIntentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_intent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "IntentsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IntentsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py new file mode 100644 index 000000000000..138b88b1273e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py @@ -0,0 +1,1331 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.intents import pagers +from google.cloud.dialogflowcx_v3.types import intent +from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent + +from .transports.base import DEFAULT_CLIENT_INFO, IntentsTransport +from .transports.grpc import IntentsGrpcTransport +from .transports.grpc_asyncio import IntentsGrpcAsyncIOTransport +from .transports.rest import IntentsRestTransport + + +class IntentsClientMeta(type): + """Metaclass for the Intents client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[IntentsTransport]] + _transport_registry["grpc"] = IntentsGrpcTransport + _transport_registry["grpc_asyncio"] = IntentsGrpcAsyncIOTransport + _transport_registry["rest"] = IntentsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[IntentsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class IntentsClient(metaclass=IntentsClientMeta): + """Service for managing + [Intents][google.cloud.dialogflow.cx.v3.Intent]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IntentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IntentsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> IntentsTransport: + """Returns the transport used by the client instance. + + Returns: + IntentsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def entity_type_path( + project: str, + location: str, + agent: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + entity_type=entity_type, + ) + + @staticmethod + def parse_entity_type_path(path: str) -> Dict[str, str]: + """Parses a entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def intent_path( + project: str, + location: str, + agent: str, + intent: str, + ) -> str: + """Returns a fully-qualified intent string.""" + return "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format( + project=project, + location=location, + agent=agent, + intent=intent, + ) + + @staticmethod + def parse_intent_path(path: str) -> Dict[str, str]: + """Parses a intent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/intents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, 
str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, IntentsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the intents client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, IntentsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, IntentsTransport): + # transport is a IntentsTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_intents( + self, + request: Optional[Union[intent.ListIntentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListIntentsPager: + r"""Returns the list of all intents in the specified + agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_intents(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListIntentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListIntentsRequest, dict]): + The request object. The request message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + parent (str): + Required. The agent to list all intents for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.intents.pagers.ListIntentsPager: + The response message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a intent.ListIntentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.ListIntentsRequest): + request = intent.ListIntentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_intents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListIntentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_intent( + self, + request: Optional[Union[intent.GetIntentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> intent.Intent: + r"""Retrieves the specified intent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_intent(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetIntentRequest( + name="name_value", + ) + + # Make the request + response = client.get_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetIntentRequest, dict]): + The request object. The request message for + [Intents.GetIntent][google.cloud.dialogflow.cx.v3.Intents.GetIntent]. + name (str): + Required. The name of the intent. Format: + ``projects//locations//agents//intents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a intent.GetIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.GetIntentRequest): + request = intent.GetIntentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_intent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_intent( + self, + request: Optional[Union[gcdc_intent.CreateIntentRequest, dict]] = None, + *, + parent: Optional[str] = None, + intent: Optional[gcdc_intent.Intent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_intent.Intent: + r"""Creates an intent in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_intent(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + intent = dialogflowcx_v3.Intent() + intent.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateIntentRequest( + parent="parent_value", + intent=intent, + ) + + # Make the request + response = client.create_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateIntentRequest, dict]): + The request object. The request message for + [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. + parent (str): + Required. The agent to create an intent for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + intent (google.cloud.dialogflowcx_v3.types.Intent): + Required. The intent to create. + This corresponds to the ``intent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, intent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_intent.CreateIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_intent.CreateIntentRequest): + request = gcdc_intent.CreateIntentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if intent is not None: + request.intent = intent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_intent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_intent( + self, + request: Optional[Union[gcdc_intent.UpdateIntentRequest, dict]] = None, + *, + intent: Optional[gcdc_intent.Intent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_intent.Intent: + r"""Updates the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_intent(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + intent = dialogflowcx_v3.Intent() + intent.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateIntentRequest( + intent=intent, + ) + + # Make the request + response = client.update_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateIntentRequest, dict]): + The request object. The request message for + [Intents.UpdateIntent][google.cloud.dialogflow.cx.v3.Intents.UpdateIntent]. + intent (google.cloud.dialogflowcx_v3.types.Intent): + Required. The intent to update. + This corresponds to the ``intent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Intent: + An intent represents a user's intent + to interact with a conversational agent. 
+ You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([intent, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_intent.UpdateIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_intent.UpdateIntentRequest): + request = gcdc_intent.UpdateIntentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if intent is not None: + request.intent = intent + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_intent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("intent.name", request.intent.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_intent( + self, + request: Optional[Union[intent.DeleteIntentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_intent(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteIntentRequest( + name="name_value", + ) + + # Make the request + client.delete_intent(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteIntentRequest, dict]): + The request object. The request message for + [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent]. + name (str): + Required. The name of the intent to delete. Format: + ``projects//locations//agents//intents/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a intent.DeleteIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.DeleteIntentRequest): + request = intent.DeleteIntentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_intent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "IntentsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("IntentsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/pagers.py new file mode 100644 index 000000000000..377598daf54e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import intent + + +class ListIntentsPager: + """A pager for iterating through ``list_intents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListIntentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``intents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListIntents`` requests and continue to iterate + through the ``intents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListIntentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., intent.ListIntentsResponse], + request: intent.ListIntentsRequest, + response: intent.ListIntentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListIntentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListIntentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = intent.ListIntentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[intent.ListIntentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[intent.Intent]: + for page in self.pages: + yield from page.intents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListIntentsAsyncPager: + """A pager for iterating through ``list_intents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListIntentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``intents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListIntents`` requests and continue to iterate + through the ``intents`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListIntentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[intent.ListIntentsResponse]], + request: intent.ListIntentsRequest, + response: intent.ListIntentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListIntentsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListIntentsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = intent.ListIntentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[intent.ListIntentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[intent.Intent]: + async def async_generator(): + async for page in self.pages: + for response in page.intents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/__init__.py new file mode 100644 index 000000000000..f4d313b609a7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import IntentsTransport +from .grpc import IntentsGrpcTransport +from .grpc_asyncio import IntentsGrpcAsyncIOTransport +from .rest import IntentsRestInterceptor, IntentsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[IntentsTransport]] +_transport_registry["grpc"] = IntentsGrpcTransport +_transport_registry["grpc_asyncio"] = IntentsGrpcAsyncIOTransport +_transport_registry["rest"] = IntentsRestTransport + +__all__ = ( + "IntentsTransport", + "IntentsGrpcTransport", + "IntentsGrpcAsyncIOTransport", + "IntentsRestTransport", + "IntentsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py new file mode 100644 index 000000000000..964064444612 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import intent +from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class IntentsTransport(abc.ABC): + """Abstract transport class for Intents.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_intents: gapic_v1.method.wrap_method( + self.list_intents, + default_timeout=None, + client_info=client_info, + ), + self.get_intent: gapic_v1.method.wrap_method( + self.get_intent, + default_timeout=None, + client_info=client_info, + ), + self.create_intent: gapic_v1.method.wrap_method( + self.create_intent, + default_timeout=None, + client_info=client_info, + ), + self.update_intent: gapic_v1.method.wrap_method( + self.update_intent, + default_timeout=None, + client_info=client_info, + ), + self.delete_intent: gapic_v1.method.wrap_method( + self.delete_intent, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_intents( + self, + ) -> Callable[ + [intent.ListIntentsRequest], + Union[intent.ListIntentsResponse, Awaitable[intent.ListIntentsResponse]], + ]: + raise NotImplementedError() + + @property + def get_intent( + self, + ) -> Callable[ + [intent.GetIntentRequest], Union[intent.Intent, Awaitable[intent.Intent]] + ]: + raise NotImplementedError() + + @property + def create_intent( + self, + ) -> Callable[ + [gcdc_intent.CreateIntentRequest], + Union[gcdc_intent.Intent, Awaitable[gcdc_intent.Intent]], + ]: + raise NotImplementedError() + + @property + def update_intent( + self, + ) -> Callable[ + [gcdc_intent.UpdateIntentRequest], + Union[gcdc_intent.Intent, Awaitable[gcdc_intent.Intent]], + ]: + raise NotImplementedError() + + @property + def delete_intent( + self, + ) -> Callable[ + [intent.DeleteIntentRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() 
+ + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("IntentsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py new file mode 100644 index 000000000000..4ff2b2d2ecd9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py @@ -0,0 +1,472 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import intent +from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent + +from .base import DEFAULT_CLIENT_INFO, IntentsTransport + + +class IntentsGrpcTransport(IntentsTransport): + """gRPC backend transport for Intents. 
+ + Service for managing + [Intents][google.cloud.dialogflow.cx.v3.Intent]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_intents( + self, + ) -> Callable[[intent.ListIntentsRequest], intent.ListIntentsResponse]: + r"""Return a callable for the list intents method over gRPC. + + Returns the list of all intents in the specified + agent. + + Returns: + Callable[[~.ListIntentsRequest], + ~.ListIntentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intents" not in self._stubs: + self._stubs["list_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/ListIntents", + request_serializer=intent.ListIntentsRequest.serialize, + response_deserializer=intent.ListIntentsResponse.deserialize, + ) + return self._stubs["list_intents"] + + @property + def get_intent(self) -> Callable[[intent.GetIntentRequest], intent.Intent]: + r"""Return a callable for the get intent method over gRPC. + + Retrieves the specified intent. + + Returns: + Callable[[~.GetIntentRequest], + ~.Intent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_intent" not in self._stubs: + self._stubs["get_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/GetIntent", + request_serializer=intent.GetIntentRequest.serialize, + response_deserializer=intent.Intent.deserialize, + ) + return self._stubs["get_intent"] + + @property + def create_intent( + self, + ) -> Callable[[gcdc_intent.CreateIntentRequest], gcdc_intent.Intent]: + r"""Return a callable for the create intent method over gRPC. + + Creates an intent in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateIntentRequest], + ~.Intent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intent" not in self._stubs: + self._stubs["create_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/CreateIntent", + request_serializer=gcdc_intent.CreateIntentRequest.serialize, + response_deserializer=gcdc_intent.Intent.deserialize, + ) + return self._stubs["create_intent"] + + @property + def update_intent( + self, + ) -> Callable[[gcdc_intent.UpdateIntentRequest], gcdc_intent.Intent]: + r"""Return a callable for the update intent method over gRPC. + + Updates the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateIntentRequest], + ~.Intent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_intent" not in self._stubs: + self._stubs["update_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/UpdateIntent", + request_serializer=gcdc_intent.UpdateIntentRequest.serialize, + response_deserializer=gcdc_intent.Intent.deserialize, + ) + return self._stubs["update_intent"] + + @property + def delete_intent(self) -> Callable[[intent.DeleteIntentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete intent method over gRPC. + + Deletes the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeleteIntentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intent" not in self._stubs: + self._stubs["delete_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/DeleteIntent", + request_serializer=intent.DeleteIntentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_intent"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("IntentsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py new file mode 100644 index 000000000000..54f3ce27c1c6 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import intent +from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent + +from .base import DEFAULT_CLIENT_INFO, IntentsTransport +from .grpc import IntentsGrpcTransport + + +class IntentsGrpcAsyncIOTransport(IntentsTransport): + """gRPC AsyncIO backend transport for Intents. + + Service for managing + [Intents][google.cloud.dialogflow.cx.v3.Intent]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_intents( + self, + ) -> Callable[[intent.ListIntentsRequest], Awaitable[intent.ListIntentsResponse]]: + r"""Return a callable for the list intents method over gRPC. + + Returns the list of all intents in the specified + agent. 
+ + Returns: + Callable[[~.ListIntentsRequest], + Awaitable[~.ListIntentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_intents" not in self._stubs: + self._stubs["list_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/ListIntents", + request_serializer=intent.ListIntentsRequest.serialize, + response_deserializer=intent.ListIntentsResponse.deserialize, + ) + return self._stubs["list_intents"] + + @property + def get_intent( + self, + ) -> Callable[[intent.GetIntentRequest], Awaitable[intent.Intent]]: + r"""Return a callable for the get intent method over gRPC. + + Retrieves the specified intent. + + Returns: + Callable[[~.GetIntentRequest], + Awaitable[~.Intent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_intent" not in self._stubs: + self._stubs["get_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/GetIntent", + request_serializer=intent.GetIntentRequest.serialize, + response_deserializer=intent.Intent.deserialize, + ) + return self._stubs["get_intent"] + + @property + def create_intent( + self, + ) -> Callable[[gcdc_intent.CreateIntentRequest], Awaitable[gcdc_intent.Intent]]: + r"""Return a callable for the create intent method over gRPC. + + Creates an intent in the specified agent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. 
+ + Returns: + Callable[[~.CreateIntentRequest], + Awaitable[~.Intent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_intent" not in self._stubs: + self._stubs["create_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/CreateIntent", + request_serializer=gcdc_intent.CreateIntentRequest.serialize, + response_deserializer=gcdc_intent.Intent.deserialize, + ) + return self._stubs["create_intent"] + + @property + def update_intent( + self, + ) -> Callable[[gcdc_intent.UpdateIntentRequest], Awaitable[gcdc_intent.Intent]]: + r"""Return a callable for the update intent method over gRPC. + + Updates the specified intent. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateIntentRequest], + Awaitable[~.Intent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_intent" not in self._stubs: + self._stubs["update_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/UpdateIntent", + request_serializer=gcdc_intent.UpdateIntentRequest.serialize, + response_deserializer=gcdc_intent.Intent.deserialize, + ) + return self._stubs["update_intent"] + + @property + def delete_intent( + self, + ) -> Callable[[intent.DeleteIntentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete intent method over gRPC. + + Deletes the specified intent. + + Note: You should always train a flow prior to sending it + queries. 
See the `training + documentation `__. + + Returns: + Callable[[~.DeleteIntentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_intent" not in self._stubs: + self._stubs["delete_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/DeleteIntent", + request_serializer=intent.DeleteIntentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_intent"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("IntentsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py new file mode 100644 index 000000000000..f7a8306e7811 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py @@ -0,0 +1,1267 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import intent +from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import IntentsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class IntentsRestInterceptor: + """Interceptor for Intents. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the IntentsRestTransport. + + .. code-block:: python + class MyCustomIntentsInterceptor(IntentsRestInterceptor): + def pre_create_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_intent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_intent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_intents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_intents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_intent(self, response): + logging.log(f"Received response: {response}") + return response + + transport = IntentsRestTransport(interceptor=MyCustomIntentsInterceptor()) + client = IntentsClient(transport=transport) + + + """ + + def pre_create_intent( + self, + request: gcdc_intent.CreateIntentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_intent.CreateIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to 
the Intents server. + """ + return request, metadata + + def post_create_intent(self, response: gcdc_intent.Intent) -> gcdc_intent.Intent: + """Post-rpc interceptor for create_intent + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + def pre_delete_intent( + self, request: intent.DeleteIntentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.DeleteIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def pre_get_intent( + self, request: intent.GetIntentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.GetIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_get_intent(self, response: intent.Intent) -> intent.Intent: + """Post-rpc interceptor for get_intent + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + def pre_list_intents( + self, request: intent.ListIntentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.ListIntentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_intents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_list_intents( + self, response: intent.ListIntentsResponse + ) -> intent.ListIntentsResponse: + """Post-rpc interceptor for list_intents + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. 
+ """ + return response + + def pre_update_intent( + self, + request: gcdc_intent.UpdateIntentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_intent.UpdateIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_update_intent(self, response: gcdc_intent.Intent) -> gcdc_intent.Intent: + """Post-rpc interceptor for update_intent + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class IntentsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: IntentsRestInterceptor + + +class IntentsRestTransport(IntentsTransport): + """REST backend transport for Intents. + + Service for managing + [Intents][google.cloud.dialogflow.cx.v3.Intent]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[IntentsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or IntentsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateIntent(IntentsRestStub):
+        def __hash__(self):
+            return hash("CreateIntent")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: gcdc_intent.CreateIntentRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcdc_intent.Intent:
+            r"""Call the create intent method over HTTP.
+
+            Args:
+                request (~.gcdc_intent.CreateIntentRequest):
+                    The request object. The request message for
+                    [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+ + Returns: + ~.gcdc_intent.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/intents", + "body": "intent", + }, + ] + request, metadata = self._interceptor.pre_create_intent(request, metadata) + pb_request = gcdc_intent.CreateIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_intent.Intent() + pb_resp = gcdc_intent.Intent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_intent(resp) + return resp + + class _DeleteIntent(IntentsRestStub): + def __hash__(self): + return hash("DeleteIntent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.DeleteIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete intent method over HTTP. + + Args: + request (~.intent.DeleteIntentRequest): + The request object. The request message for + [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/intents/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_intent(request, metadata) + pb_request = intent.DeleteIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetIntent(IntentsRestStub): + def __hash__(self): + return hash("GetIntent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.GetIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> intent.Intent: + r"""Call the get intent method over HTTP. + + Args: + request (~.intent.GetIntentRequest): + The request object. 
The request message for + [Intents.GetIntent][google.cloud.dialogflow.cx.v3.Intents.GetIntent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.intent.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/intents/*}", + }, + ] + request, metadata = self._interceptor.pre_get_intent(request, metadata) + pb_request = intent.GetIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intent.Intent() + pb_resp = intent.Intent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_intent(resp) + return resp + + class _ListIntents(IntentsRestStub): + def __hash__(self): + return hash("ListIntents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.ListIntentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> intent.ListIntentsResponse: + r"""Call the list intents method over HTTP. + + Args: + request (~.intent.ListIntentsRequest): + The request object. The request message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.intent.ListIntentsResponse: + The response message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/intents", + }, + ] + request, metadata = self._interceptor.pre_list_intents(request, metadata) + pb_request = intent.ListIntentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = intent.ListIntentsResponse() + pb_resp = intent.ListIntentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_intents(resp) + return resp + + class _UpdateIntent(IntentsRestStub): + def __hash__(self): + return hash("UpdateIntent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_intent.UpdateIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_intent.Intent: + r"""Call the update intent method over HTTP. + + Args: + request (~.gcdc_intent.UpdateIntentRequest): + The request object. The request message for + [Intents.UpdateIntent][google.cloud.dialogflow.cx.v3.Intents.UpdateIntent]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_intent.Intent: + An intent represents a user's intent + to interact with a conversational agent. + You can provide information for the + Dialogflow API to use to match user + input to an intent by adding training + phrases (i.e., examples of user input) + to your intent. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{intent.name=projects/*/locations/*/agents/*/intents/*}", + "body": "intent", + }, + ] + request, metadata = self._interceptor.pre_update_intent(request, metadata) + pb_request = gcdc_intent.UpdateIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_intent.Intent() + pb_resp = gcdc_intent.Intent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_intent(resp) + return resp + + @property + def create_intent( + self, + ) -> Callable[[gcdc_intent.CreateIntentRequest], gcdc_intent.Intent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_intent(self) -> Callable[[intent.DeleteIntentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_intent(self) -> Callable[[intent.GetIntentRequest], intent.Intent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_intents( + self, + ) -> Callable[[intent.ListIntentsRequest], intent.ListIntentsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListIntents(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_intent( + self, + ) -> Callable[[gcdc_intent.UpdateIntentRequest], gcdc_intent.Intent]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(IntentsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(IntentsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(IntentsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(IntentsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(IntentsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("IntentsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/__init__.py new file mode 100644 index 000000000000..a7bd97b81e22 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PagesAsyncClient +from .client import PagesClient + +__all__ = ( + "PagesClient", + "PagesAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py new file mode 100644 index 000000000000..a78e97d96db0 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py @@ -0,0 +1,1116 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.pages import pagers +from google.cloud.dialogflowcx_v3.types import fulfillment +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import page as gcdc_page + +from .client import PagesClient +from .transports.base import DEFAULT_CLIENT_INFO, PagesTransport +from .transports.grpc_asyncio import PagesGrpcAsyncIOTransport + + +class PagesAsyncClient: + """Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].""" + + _client: PagesClient + + DEFAULT_ENDPOINT = PagesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PagesClient.DEFAULT_MTLS_ENDPOINT + + entity_type_path = staticmethod(PagesClient.entity_type_path) + parse_entity_type_path = staticmethod(PagesClient.parse_entity_type_path) + flow_path = staticmethod(PagesClient.flow_path) + parse_flow_path = staticmethod(PagesClient.parse_flow_path) + intent_path = staticmethod(PagesClient.intent_path) + parse_intent_path = 
staticmethod(PagesClient.parse_intent_path) + page_path = staticmethod(PagesClient.page_path) + parse_page_path = staticmethod(PagesClient.parse_page_path) + transition_route_group_path = staticmethod(PagesClient.transition_route_group_path) + parse_transition_route_group_path = staticmethod( + PagesClient.parse_transition_route_group_path + ) + webhook_path = staticmethod(PagesClient.webhook_path) + parse_webhook_path = staticmethod(PagesClient.parse_webhook_path) + common_billing_account_path = staticmethod(PagesClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod( + PagesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PagesClient.common_folder_path) + parse_common_folder_path = staticmethod(PagesClient.parse_common_folder_path) + common_organization_path = staticmethod(PagesClient.common_organization_path) + parse_common_organization_path = staticmethod( + PagesClient.parse_common_organization_path + ) + common_project_path = staticmethod(PagesClient.common_project_path) + parse_common_project_path = staticmethod(PagesClient.parse_common_project_path) + common_location_path = staticmethod(PagesClient.common_location_path) + parse_common_location_path = staticmethod(PagesClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PagesAsyncClient: The constructed client. + """ + return PagesClient.from_service_account_info.__func__(PagesAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PagesAsyncClient: The constructed client. + """ + return PagesClient.from_service_account_file.__func__(PagesAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return PagesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PagesTransport: + """Returns the transport used by the client instance. + + Returns: + PagesTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(PagesClient).get_transport_class, type(PagesClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PagesTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the pages client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PagesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PagesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_pages( + self, + request: Optional[Union[page.ListPagesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPagesAsyncPager: + r"""Returns the list of all pages in the specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_pages(): + # Create a client + client = dialogflowcx_v3.PagesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListPagesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_pages(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListPagesRequest, dict]]): + The request object. The request message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + parent (:class:`str`): + Required. The flow to list all pages for. Format: + ``projects//locations//agents//flows/``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.pages.pagers.ListPagesAsyncPager: + The response message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = page.ListPagesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_pages, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListPagesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_page( + self, + request: Optional[Union[page.GetPageRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> page.Page: + r"""Retrieves the specified page. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_page(): + # Create a client + client = dialogflowcx_v3.PagesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetPageRequest( + name="name_value", + ) + + # Make the request + response = await client.get_page(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetPageRequest, dict]]): + The request object. The request message for + [Pages.GetPage][google.cloud.dialogflow.cx.v3.Pages.GetPage]. + name (:class:`str`): + Required. The name of the page. Format: + ``projects//locations//agents//flows//pages/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.Page: + A Dialogflow CX conversation (session) can be described and visualized as a + state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on + the topics the flow is designed for. At any given + moment, exactly one page is the current page, the + current page is considered active, and the flow + associated with that page is considered active. Every + flow has a special start page. When a flow initially + becomes active, the start page page becomes the + current page. For each conversational turn, the + current page will either stay the same or transition + to another page. + + You configure each page to collect information from + the end-user that is relevant for the conversational + state represented by the page. + + For more information, see the [Page + guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = page.GetPageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_page, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_page( + self, + request: Optional[Union[gcdc_page.CreatePageRequest, dict]] = None, + *, + parent: Optional[str] = None, + page: Optional[gcdc_page.Page] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_page.Page: + r"""Creates a page in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_page(): + # Create a client + client = dialogflowcx_v3.PagesAsyncClient() + + # Initialize request argument(s) + page = dialogflowcx_v3.Page() + page.display_name = "display_name_value" + + request = dialogflowcx_v3.CreatePageRequest( + parent="parent_value", + page=page, + ) + + # Make the request + response = await client.create_page(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreatePageRequest, dict]]): + The request object. The request message for + [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage]. + parent (:class:`str`): + Required. The flow to create a page for. 
Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page (:class:`google.cloud.dialogflowcx_v3.types.Page`): + Required. The page to create. + This corresponds to the ``page`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Page: + A Dialogflow CX conversation (session) can be described and visualized as a + state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on + the topics the flow is designed for. At any given + moment, exactly one page is the current page, the + current page is considered active, and the flow + associated with that page is considered active. Every + flow has a special start page. When a flow initially + becomes active, the start page page becomes the + current page. For each conversational turn, the + current page will either stay the same or transition + to another page. + + You configure each page to collect information from + the end-user that is relevant for the conversational + state represented by the page. + + For more information, see the [Page + guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, page]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_page.CreatePageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if page is not None: + request.page = page + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_page, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_page( + self, + request: Optional[Union[gcdc_page.UpdatePageRequest, dict]] = None, + *, + page: Optional[gcdc_page.Page] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_page.Page: + r"""Updates the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_page(): + # Create a client + client = dialogflowcx_v3.PagesAsyncClient() + + # Initialize request argument(s) + page = dialogflowcx_v3.Page() + page.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdatePageRequest( + page=page, + ) + + # Make the request + response = await client.update_page(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdatePageRequest, dict]]): + The request object. The request message for + [Pages.UpdatePage][google.cloud.dialogflow.cx.v3.Pages.UpdatePage]. + page (:class:`google.cloud.dialogflowcx_v3.types.Page`): + Required. The page to update. + This corresponds to the ``page`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Page: + A Dialogflow CX conversation (session) can be described and visualized as a + state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on + the topics the flow is designed for. 
At any given + moment, exactly one page is the current page, the + current page is considered active, and the flow + associated with that page is considered active. Every + flow has a special start page. When a flow initially + becomes active, the start page page becomes the + current page. For each conversational turn, the + current page will either stay the same or transition + to another page. + + You configure each page to collect information from + the end-user that is relevant for the conversational + state represented by the page. + + For more information, see the [Page + guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_page.UpdatePageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page is not None: + request.page = page + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_page, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("page.name", request.page.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_page( + self, + request: Optional[Union[page.DeletePageRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_page(): + # Create a client + client = dialogflowcx_v3.PagesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeletePageRequest( + name="name_value", + ) + + # Make the request + await client.delete_page(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeletePageRequest, dict]]): + The request object. The request message for + [Pages.DeletePage][google.cloud.dialogflow.cx.v3.Pages.DeletePage]. + name (:class:`str`): + Required. The name of the page to delete. Format: + ``projects//locations//agents//Flows//pages/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = page.DeletePageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_page, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "PagesAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PagesAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py new file mode 100644 index 000000000000..0b21f84f1d24 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py @@ -0,0 +1,1474 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.pages import pagers +from google.cloud.dialogflowcx_v3.types import fulfillment +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import page as gcdc_page + +from .transports.base import DEFAULT_CLIENT_INFO, PagesTransport +from .transports.grpc import PagesGrpcTransport +from .transports.grpc_asyncio import PagesGrpcAsyncIOTransport +from .transports.rest import PagesRestTransport + + +class PagesClientMeta(type): + """Metaclass for the Pages client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[PagesTransport]] + _transport_registry["grpc"] = PagesGrpcTransport + _transport_registry["grpc_asyncio"] = PagesGrpcAsyncIOTransport + _transport_registry["rest"] = PagesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PagesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PagesClient(metaclass=PagesClientMeta): + """Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PagesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PagesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PagesTransport: + """Returns the transport used by the client instance. + + Returns: + PagesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def entity_type_path( + project: str, + location: str, + agent: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + entity_type=entity_type, + ) + + @staticmethod + def parse_entity_type_path(path: str) -> Dict[str, str]: + """Parses a entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def flow_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_path(path: str) -> Dict[str, str]: + """Parses a flow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def intent_path( + project: str, + location: str, + agent: str, + intent: str, + ) -> str: + """Returns a fully-qualified intent string.""" + return "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format( + project=project, + location=location, + agent=agent, + intent=intent, + ) + + @staticmethod + def parse_intent_path(path: str) -> Dict[str, str]: + """Parses a intent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/intents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def page_path( + project: str, + location: str, + agent: str, + flow: str, + page: str, + ) -> str: + """Returns a fully-qualified page 
string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/pages/{page}".format( + project=project, + location=location, + agent=agent, + flow=flow, + page=page, + ) + + @staticmethod + def parse_page_path(path: str) -> Dict[str, str]: + """Parses a page path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/pages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transition_route_group_path( + project: str, + location: str, + agent: str, + flow: str, + transition_route_group: str, + ) -> str: + """Returns a fully-qualified transition_route_group string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( + project=project, + location=location, + agent=agent, + flow=flow, + transition_route_group=transition_route_group, + ) + + @staticmethod + def parse_transition_route_group_path(path: str) -> Dict[str, str]: + """Parses a transition_route_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/transitionRouteGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a 
fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component 
segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PagesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the pages client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PagesTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PagesTransport): + # transport is a PagesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_pages( + self, + request: Optional[Union[page.ListPagesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPagesPager: + r"""Returns the list of all pages in the specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_pages(): + # Create a client + client = dialogflowcx_v3.PagesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListPagesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_pages(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListPagesRequest, dict]): + The request object. 
The request message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + parent (str): + Required. The flow to list all pages for. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.pages.pagers.ListPagesPager: + The response message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a page.ListPagesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, page.ListPagesRequest): + request = page.ListPagesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_pages] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPagesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_page( + self, + request: Optional[Union[page.GetPageRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> page.Page: + r"""Retrieves the specified page. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_page(): + # Create a client + client = dialogflowcx_v3.PagesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetPageRequest( + name="name_value", + ) + + # Make the request + response = client.get_page(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetPageRequest, dict]): + The request object. The request message for + [Pages.GetPage][google.cloud.dialogflow.cx.v3.Pages.GetPage]. + name (str): + Required. The name of the page. Format: + ``projects//locations//agents//flows//pages/``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Page: + A Dialogflow CX conversation (session) can be described and visualized as a + state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on + the topics the flow is designed for. At any given + moment, exactly one page is the current page, the + current page is considered active, and the flow + associated with that page is considered active. Every + flow has a special start page. When a flow initially + becomes active, the start page page becomes the + current page. For each conversational turn, the + current page will either stay the same or transition + to another page. + + You configure each page to collect information from + the end-user that is relevant for the conversational + state represented by the page. + + For more information, see the [Page + guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a page.GetPageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, page.GetPageRequest): + request = page.GetPageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_page] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_page( + self, + request: Optional[Union[gcdc_page.CreatePageRequest, dict]] = None, + *, + parent: Optional[str] = None, + page: Optional[gcdc_page.Page] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_page.Page: + r"""Creates a page in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_page(): + # Create a client + client = dialogflowcx_v3.PagesClient() + + # Initialize request argument(s) + page = dialogflowcx_v3.Page() + page.display_name = "display_name_value" + + request = dialogflowcx_v3.CreatePageRequest( + parent="parent_value", + page=page, + ) + + # Make the request + response = client.create_page(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreatePageRequest, dict]): + The request object. The request message for + [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage]. + parent (str): + Required. The flow to create a page for. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page (google.cloud.dialogflowcx_v3.types.Page): + Required. The page to create. + This corresponds to the ``page`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Page: + A Dialogflow CX conversation (session) can be described and visualized as a + state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on + the topics the flow is designed for. 
At any given + moment, exactly one page is the current page, the + current page is considered active, and the flow + associated with that page is considered active. Every + flow has a special start page. When a flow initially + becomes active, the start page page becomes the + current page. For each conversational turn, the + current page will either stay the same or transition + to another page. + + You configure each page to collect information from + the end-user that is relevant for the conversational + state represented by the page. + + For more information, see the [Page + guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, page]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_page.CreatePageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_page.CreatePageRequest): + request = gcdc_page.CreatePageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if page is not None: + request.page = page + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_page] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_page( + self, + request: Optional[Union[gcdc_page.UpdatePageRequest, dict]] = None, + *, + page: Optional[gcdc_page.Page] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_page.Page: + r"""Updates the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_page(): + # Create a client + client = dialogflowcx_v3.PagesClient() + + # Initialize request argument(s) + page = dialogflowcx_v3.Page() + page.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdatePageRequest( + page=page, + ) + + # Make the request + response = client.update_page(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdatePageRequest, dict]): + The request object. The request message for + [Pages.UpdatePage][google.cloud.dialogflow.cx.v3.Pages.UpdatePage]. + page (google.cloud.dialogflowcx_v3.types.Page): + Required. The page to update. + This corresponds to the ``page`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Page: + A Dialogflow CX conversation (session) can be described and visualized as a + state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on + the topics the flow is designed for. At any given + moment, exactly one page is the current page, the + current page is considered active, and the flow + associated with that page is considered active. Every + flow has a special start page. When a flow initially + becomes active, the start page page becomes the + current page. For each conversational turn, the + current page will either stay the same or transition + to another page. + + You configure each page to collect information from + the end-user that is relevant for the conversational + state represented by the page. + + For more information, see the [Page + guide](\ https://cloud.google.com/dialogflow/cx/docs/concept/page). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_page.UpdatePageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_page.UpdatePageRequest): + request = gcdc_page.UpdatePageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page is not None: + request.page = page + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_page] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("page.name", request.page.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_page( + self, + request: Optional[Union[page.DeletePageRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_page(): + # Create a client + client = dialogflowcx_v3.PagesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeletePageRequest( + name="name_value", + ) + + # Make the request + client.delete_page(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeletePageRequest, dict]): + The request object. The request message for + [Pages.DeletePage][google.cloud.dialogflow.cx.v3.Pages.DeletePage]. + name (str): + Required. The name of the page to delete. Format: + ``projects//locations//agents//Flows//pages/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a page.DeletePageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, page.DeletePageRequest): + request = page.DeletePageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_page] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "PagesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PagesClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/pagers.py new file mode 100644 index 000000000000..6748820bd432 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import page + + +class ListPagesPager: + """A pager for iterating through ``list_pages`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListPagesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``pages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPages`` requests and continue to iterate + through the ``pages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListPagesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., page.ListPagesResponse], + request: page.ListPagesRequest, + response: page.ListPagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListPagesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListPagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = page.ListPagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[page.ListPagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[page.Page]: + for page in self.pages: + yield from page.pages + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPagesAsyncPager: + """A pager for iterating through ``list_pages`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListPagesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``pages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPages`` requests and continue to iterate + through the ``pages`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListPagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[page.ListPagesResponse]], + request: page.ListPagesRequest, + response: page.ListPagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListPagesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListPagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = page.ListPagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[page.ListPagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[page.Page]: + async def async_generator(): + async for page in self.pages: + for response in page.pages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/__init__.py new file mode 100644 index 000000000000..5e5bf53f6bfe --- /dev/null +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PagesTransport +from .grpc import PagesGrpcTransport +from .grpc_asyncio import PagesGrpcAsyncIOTransport +from .rest import PagesRestInterceptor, PagesRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[PagesTransport]] +_transport_registry["grpc"] = PagesGrpcTransport +_transport_registry["grpc_asyncio"] = PagesGrpcAsyncIOTransport +_transport_registry["rest"] = PagesRestTransport + +__all__ = ( + "PagesTransport", + "PagesGrpcTransport", + "PagesGrpcAsyncIOTransport", + "PagesRestTransport", + "PagesRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/base.py new file mode 100644 index 000000000000..a09bfc3d6ab9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/base.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import page as gcdc_page + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PagesTransport(abc.ABC): + """Abstract transport class for Pages.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_pages: gapic_v1.method.wrap_method( + self.list_pages, + default_timeout=None, + client_info=client_info, + ), + self.get_page: gapic_v1.method.wrap_method( + self.get_page, + default_timeout=None, + client_info=client_info, + ), + self.create_page: gapic_v1.method.wrap_method( + self.create_page, + default_timeout=None, + client_info=client_info, + ), + self.update_page: gapic_v1.method.wrap_method( + self.update_page, + default_timeout=None, + client_info=client_info, + ), + self.delete_page: gapic_v1.method.wrap_method( + self.delete_page, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_pages( + self, + ) -> Callable[ + [page.ListPagesRequest], + Union[page.ListPagesResponse, Awaitable[page.ListPagesResponse]], + ]: + raise NotImplementedError() + + @property + def get_page( + self, + ) -> Callable[[page.GetPageRequest], Union[page.Page, Awaitable[page.Page]]]: + raise NotImplementedError() + + @property + def create_page( + self, + ) -> Callable[ + [gcdc_page.CreatePageRequest], Union[gcdc_page.Page, Awaitable[gcdc_page.Page]] + ]: + raise NotImplementedError() + + @property + def update_page( + self, + ) -> Callable[ + [gcdc_page.UpdatePageRequest], Union[gcdc_page.Page, Awaitable[gcdc_page.Page]] + ]: + raise NotImplementedError() + + @property + def delete_page( + self, + ) -> Callable[ + [page.DeletePageRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + 
+__all__ = ("PagesTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/grpc.py new file mode 100644 index 000000000000..d87f6572bc9c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/grpc.py @@ -0,0 +1,464 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import page as gcdc_page + +from .base import DEFAULT_CLIENT_INFO, PagesTransport + + +class PagesGrpcTransport(PagesTransport): + """gRPC backend transport for Pages. + + Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page]. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_pages(self) -> Callable[[page.ListPagesRequest], page.ListPagesResponse]: + r"""Return a callable for the list pages method over gRPC. + + Returns the list of all pages in the specified flow. + + Returns: + Callable[[~.ListPagesRequest], + ~.ListPagesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_pages" not in self._stubs: + self._stubs["list_pages"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/ListPages", + request_serializer=page.ListPagesRequest.serialize, + response_deserializer=page.ListPagesResponse.deserialize, + ) + return self._stubs["list_pages"] + + @property + def get_page(self) -> Callable[[page.GetPageRequest], page.Page]: + r"""Return a callable for the get page method over gRPC. + + Retrieves the specified page. + + Returns: + Callable[[~.GetPageRequest], + ~.Page]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_page" not in self._stubs: + self._stubs["get_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/GetPage", + request_serializer=page.GetPageRequest.serialize, + response_deserializer=page.Page.deserialize, + ) + return self._stubs["get_page"] + + @property + def create_page(self) -> Callable[[gcdc_page.CreatePageRequest], gcdc_page.Page]: + r"""Return a callable for the create page method over gRPC. + + Creates a page in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreatePageRequest], + ~.Page]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_page" not in self._stubs: + self._stubs["create_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/CreatePage", + request_serializer=gcdc_page.CreatePageRequest.serialize, + response_deserializer=gcdc_page.Page.deserialize, + ) + return self._stubs["create_page"] + + @property + def update_page(self) -> Callable[[gcdc_page.UpdatePageRequest], gcdc_page.Page]: + r"""Return a callable for the update page method over gRPC. + + Updates the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdatePageRequest], + ~.Page]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_page" not in self._stubs: + self._stubs["update_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/UpdatePage", + request_serializer=gcdc_page.UpdatePageRequest.serialize, + response_deserializer=gcdc_page.Page.deserialize, + ) + return self._stubs["update_page"] + + @property + def delete_page(self) -> Callable[[page.DeletePageRequest], empty_pb2.Empty]: + r"""Return a callable for the delete page method over gRPC. + + Deletes the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeletePageRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_page" not in self._stubs: + self._stubs["delete_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/DeletePage", + request_serializer=page.DeletePageRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_page"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PagesGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/grpc_asyncio.py new file mode 100644 index 000000000000..557ead2f81db --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/grpc_asyncio.py @@ -0,0 +1,471 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import page as gcdc_page + +from .base import DEFAULT_CLIENT_INFO, PagesTransport +from .grpc import PagesGrpcTransport + + +class PagesGrpcAsyncIOTransport(PagesTransport): + """gRPC AsyncIO backend transport for Pages. + + Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_pages( + self, + ) -> Callable[[page.ListPagesRequest], Awaitable[page.ListPagesResponse]]: + r"""Return a callable for the list pages method over gRPC. + + Returns the list of all pages in the specified flow. + + Returns: + Callable[[~.ListPagesRequest], + Awaitable[~.ListPagesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_pages" not in self._stubs: + self._stubs["list_pages"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/ListPages", + request_serializer=page.ListPagesRequest.serialize, + response_deserializer=page.ListPagesResponse.deserialize, + ) + return self._stubs["list_pages"] + + @property + def get_page(self) -> Callable[[page.GetPageRequest], Awaitable[page.Page]]: + r"""Return a callable for the get page method over gRPC. + + Retrieves the specified page. + + Returns: + Callable[[~.GetPageRequest], + Awaitable[~.Page]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_page" not in self._stubs: + self._stubs["get_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/GetPage", + request_serializer=page.GetPageRequest.serialize, + response_deserializer=page.Page.deserialize, + ) + return self._stubs["get_page"] + + @property + def create_page( + self, + ) -> Callable[[gcdc_page.CreatePageRequest], Awaitable[gcdc_page.Page]]: + r"""Return a callable for the create page method over gRPC. + + Creates a page in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreatePageRequest], + Awaitable[~.Page]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_page" not in self._stubs: + self._stubs["create_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/CreatePage", + request_serializer=gcdc_page.CreatePageRequest.serialize, + response_deserializer=gcdc_page.Page.deserialize, + ) + return self._stubs["create_page"] + + @property + def update_page( + self, + ) -> Callable[[gcdc_page.UpdatePageRequest], Awaitable[gcdc_page.Page]]: + r"""Return a callable for the update page method over gRPC. + + Updates the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdatePageRequest], + Awaitable[~.Page]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_page" not in self._stubs: + self._stubs["update_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/UpdatePage", + request_serializer=gcdc_page.UpdatePageRequest.serialize, + response_deserializer=gcdc_page.Page.deserialize, + ) + return self._stubs["update_page"] + + @property + def delete_page( + self, + ) -> Callable[[page.DeletePageRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete page method over gRPC. + + Deletes the specified page. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeletePageRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_page" not in self._stubs: + self._stubs["delete_page"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Pages/DeletePage", + request_serializer=page.DeletePageRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_page"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("PagesGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py new file mode 100644 index 000000000000..c3808056f5d8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py @@ -0,0 +1,1298 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import page as gcdc_page + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PagesTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PagesRestInterceptor: + """Interceptor for Pages. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PagesRestTransport. + + .. 
code-block:: python + class MyCustomPagesInterceptor(PagesRestInterceptor): + def pre_create_page(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_page(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_page(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_page(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_page(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_pages(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_pages(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_page(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_page(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PagesRestTransport(interceptor=MyCustomPagesInterceptor()) + client = PagesClient(transport=transport) + + + """ + + def pre_create_page( + self, request: gcdc_page.CreatePageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gcdc_page.CreatePageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_page + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_create_page(self, response: gcdc_page.Page) -> gcdc_page.Page: + """Post-rpc interceptor for create_page + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_page( + self, request: page.DeletePageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[page.DeletePageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_page + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def pre_get_page( + self, request: page.GetPageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[page.GetPageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_page + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_get_page(self, response: page.Page) -> page.Page: + """Post-rpc interceptor for get_page + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + def pre_list_pages( + self, request: page.ListPagesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[page.ListPagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_pages + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_list_pages( + self, response: page.ListPagesResponse + ) -> page.ListPagesResponse: + """Post-rpc interceptor for list_pages + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + def pre_update_page( + self, request: gcdc_page.UpdatePageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[gcdc_page.UpdatePageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_page + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. 
+ """ + return request, metadata + + def post_update_page(self, response: gcdc_page.Page) -> gcdc_page.Page: + """Post-rpc interceptor for update_page + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Pages server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Pages server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PagesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PagesRestInterceptor + + +class PagesRestTransport(PagesTransport): + """REST backend transport for Pages. + + Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PagesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PagesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreatePage(PagesRestStub): + def __hash__(self): + return hash("CreatePage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_page.CreatePageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_page.Page: + r"""Call the create page method over HTTP. + + Args: + request (~.gcdc_page.CreatePageRequest): + The request object. The request message for + [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.gcdc_page.Page: + A Dialogflow CX conversation (session) can be described + and visualized as a state machine. The states of a CX + session are represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on the + topics the flow is designed for. At any given moment, + exactly one page is the current page, the current page + is considered active, and the flow associated with that + page is considered active. Every flow has a special + start page. When a flow initially becomes active, the + start page page becomes the current page. For each + conversational turn, the current page will either stay + the same or transition to another page. + + You configure each page to collect information from the + end-user that is relevant for the conversational state + represented by the page. + + For more information, see the `Page + guide `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/flows/*}/pages", + "body": "page", + }, + ] + request, metadata = self._interceptor.pre_create_page(request, metadata) + pb_request = gcdc_page.CreatePageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = 
getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_page.Page() + pb_resp = gcdc_page.Page.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_page(resp) + return resp + + class _DeletePage(PagesRestStub): + def __hash__(self): + return hash("DeletePage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: page.DeletePageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete page method over HTTP. + + Args: + request (~.page.DeletePageRequest): + The request object. The request message for + [Pages.DeletePage][google.cloud.dialogflow.cx.v3.Pages.DeletePage]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/pages/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_page(request, metadata) + pb_request = page.DeletePageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetPage(PagesRestStub): + def __hash__(self): + return hash("GetPage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: page.GetPageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> page.Page: + r"""Call the get page method over HTTP. + + Args: + request (~.page.GetPageRequest): + The request object. The request message for + [Pages.GetPage][google.cloud.dialogflow.cx.v3.Pages.GetPage]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.page.Page: + A Dialogflow CX conversation (session) can be described + and visualized as a state machine. The states of a CX + session are represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on the + topics the flow is designed for. At any given moment, + exactly one page is the current page, the current page + is considered active, and the flow associated with that + page is considered active. Every flow has a special + start page. When a flow initially becomes active, the + start page page becomes the current page. For each + conversational turn, the current page will either stay + the same or transition to another page. + + You configure each page to collect information from the + end-user that is relevant for the conversational state + represented by the page. + + For more information, see the `Page + guide `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/pages/*}", + }, + ] + request, metadata = self._interceptor.pre_get_page(request, metadata) + pb_request = page.GetPageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = page.Page() + pb_resp = page.Page.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_page(resp) + return resp + + class _ListPages(PagesRestStub): + def __hash__(self): + return hash("ListPages") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: page.ListPagesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> page.ListPagesResponse: + r"""Call the list pages method over HTTP. + + Args: + request (~.page.ListPagesRequest): + The request object. The request message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.page.ListPagesResponse: + The response message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/flows/*}/pages", + }, + ] + request, metadata = self._interceptor.pre_list_pages(request, metadata) + pb_request = page.ListPagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = page.ListPagesResponse() + pb_resp = page.ListPagesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_pages(resp) + return resp + + class _UpdatePage(PagesRestStub): + def __hash__(self): + return hash("UpdatePage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_page.UpdatePageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_page.Page: + r"""Call the update page method over HTTP. + + Args: + request (~.gcdc_page.UpdatePageRequest): + The request object. The request message for + [Pages.UpdatePage][google.cloud.dialogflow.cx.v3.Pages.UpdatePage]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_page.Page: + A Dialogflow CX conversation (session) can be described + and visualized as a state machine. The states of a CX + session are represented by pages. + + For each flow, you define many pages, where your + combined pages can handle a complete conversation on the + topics the flow is designed for. At any given moment, + exactly one page is the current page, the current page + is considered active, and the flow associated with that + page is considered active. Every flow has a special + start page. When a flow initially becomes active, the + start page page becomes the current page. 
For each + conversational turn, the current page will either stay + the same or transition to another page. + + You configure each page to collect information from the + end-user that is relevant for the conversational state + represented by the page. + + For more information, see the `Page + guide `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{page.name=projects/*/locations/*/agents/*/flows/*/pages/*}", + "body": "page", + }, + ] + request, metadata = self._interceptor.pre_update_page(request, metadata) + pb_request = gcdc_page.UpdatePageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_page.Page() + pb_resp = gcdc_page.Page.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_page(resp) + return resp + + @property + def create_page(self) -> Callable[[gcdc_page.CreatePageRequest], gcdc_page.Page]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePage(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_page(self) -> Callable[[page.DeletePageRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePage(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_page(self) -> Callable[[page.GetPageRequest], page.Page]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPage(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_pages(self) -> Callable[[page.ListPagesRequest], page.ListPagesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPages(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_page(self) -> Callable[[gcdc_page.UpdatePageRequest], gcdc_page.Page]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdatePage(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(PagesRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(PagesRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(PagesRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(PagesRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(PagesRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PagesRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/__init__.py new file mode 100644 index 000000000000..7b13b5bde6bf --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SecuritySettingsServiceAsyncClient +from .client import SecuritySettingsServiceClient + +__all__ = ( + "SecuritySettingsServiceClient", + "SecuritySettingsServiceAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py new file mode 100644 index 000000000000..95c8a73cdbbc --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/async_client.py @@ -0,0 +1,1102 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.security_settings_service import pagers +from google.cloud.dialogflowcx_v3.types import ( + security_settings as gcdc_security_settings, +) +from google.cloud.dialogflowcx_v3.types import security_settings + +from .client import SecuritySettingsServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SecuritySettingsServiceTransport +from .transports.grpc_asyncio import SecuritySettingsServiceGrpcAsyncIOTransport + + +class SecuritySettingsServiceAsyncClient: + """Service for managing security settings for Dialogflow.""" + + _client: SecuritySettingsServiceClient + + DEFAULT_ENDPOINT = SecuritySettingsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SecuritySettingsServiceClient.DEFAULT_MTLS_ENDPOINT + + deidentify_template_path = staticmethod( + SecuritySettingsServiceClient.deidentify_template_path + ) + parse_deidentify_template_path = staticmethod( + SecuritySettingsServiceClient.parse_deidentify_template_path + ) + inspect_template_path = 
staticmethod( + SecuritySettingsServiceClient.inspect_template_path + ) + parse_inspect_template_path = staticmethod( + SecuritySettingsServiceClient.parse_inspect_template_path + ) + security_settings_path = staticmethod( + SecuritySettingsServiceClient.security_settings_path + ) + parse_security_settings_path = staticmethod( + SecuritySettingsServiceClient.parse_security_settings_path + ) + common_billing_account_path = staticmethod( + SecuritySettingsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SecuritySettingsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SecuritySettingsServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SecuritySettingsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SecuritySettingsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SecuritySettingsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + SecuritySettingsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + SecuritySettingsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + SecuritySettingsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + SecuritySettingsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SecuritySettingsServiceAsyncClient: The constructed client. 
+ """ + return SecuritySettingsServiceClient.from_service_account_info.__func__(SecuritySettingsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SecuritySettingsServiceAsyncClient: The constructed client. + """ + return SecuritySettingsServiceClient.from_service_account_file.__func__(SecuritySettingsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SecuritySettingsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SecuritySettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SecuritySettingsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(SecuritySettingsServiceClient).get_transport_class, + type(SecuritySettingsServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SecuritySettingsServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the security settings service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SecuritySettingsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SecuritySettingsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_security_settings( + self, + request: Optional[ + Union[gcdc_security_settings.CreateSecuritySettingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + security_settings: Optional[gcdc_security_settings.SecuritySettings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_security_settings.SecuritySettings: + r"""Create security settings in the specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceAsyncClient() + + # Initialize request argument(s) + security_settings = dialogflowcx_v3.SecuritySettings() + security_settings.retention_window_days = 2271 + security_settings.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateSecuritySettingsRequest( + parent="parent_value", + security_settings=security_settings, + ) + + # Make the request + response = await client.create_security_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateSecuritySettingsRequest, dict]]): + The request object. The request message for + [SecuritySettings.CreateSecuritySettings][]. + parent (:class:`str`): + Required. The location to create an + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_settings (:class:`google.cloud.dialogflowcx_v3.types.SecuritySettings`): + Required. The security settings to + create. + + This corresponds to the ``security_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, security_settings]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_security_settings.CreateSecuritySettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if security_settings is not None: + request.security_settings = security_settings + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_security_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_security_settings( + self, + request: Optional[ + Union[security_settings.GetSecuritySettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_settings.SecuritySettings: + r"""Retrieves the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + The returned settings may be stale by up to 1 minute. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetSecuritySettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_security_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetSecuritySettingsRequest, dict]]): + The request object. The request message for + [SecuritySettingsService.GetSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.GetSecuritySettings]. + name (:class:`str`): + Required. Resource name of the settings. Format: + ``projects//locations//securitySettings/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = security_settings.GetSecuritySettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_security_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_security_settings( + self, + request: Optional[ + Union[gcdc_security_settings.UpdateSecuritySettingsRequest, dict] + ] = None, + *, + security_settings: Optional[gcdc_security_settings.SecuritySettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_security_settings.SecuritySettings: + r"""Updates the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceAsyncClient() + + # Initialize request argument(s) + security_settings = dialogflowcx_v3.SecuritySettings() + security_settings.retention_window_days = 2271 + security_settings.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateSecuritySettingsRequest( + security_settings=security_settings, + ) + + # Make the request + response = await client.update_security_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateSecuritySettingsRequest, dict]]): + The request object. The request message for + [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.UpdateSecuritySettings]. 
+ security_settings (:class:`google.cloud.dialogflowcx_v3.types.SecuritySettings`): + Required. [SecuritySettings] object that contains values + for each of the fields to update. + + This corresponds to the ``security_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The mask to control which + fields get updated. If the mask is not + present, all fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([security_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_security_settings.UpdateSecuritySettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if security_settings is not None: + request.security_settings = security_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_security_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("security_settings.name", request.security_settings.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_security_settings( + self, + request: Optional[ + Union[security_settings.ListSecuritySettingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSecuritySettingsAsyncPager: + r"""Returns the list of all security settings in the + specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListSecuritySettingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_settings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListSecuritySettingsRequest, dict]]): + The request object. The request message for + [SecuritySettings.ListSecuritySettings][]. + parent (:class:`str`): + Required. The location to list all security settings + for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.security_settings_service.pagers.ListSecuritySettingsAsyncPager: + The response message for + [SecuritySettings.ListSecuritySettings][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = security_settings.ListSecuritySettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_security_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSecuritySettingsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_security_settings( + self, + request: Optional[ + Union[security_settings.DeleteSecuritySettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteSecuritySettingsRequest( + name="name_value", + ) + + # Make the request + await client.delete_security_settings(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteSecuritySettingsRequest, dict]]): + The request object. The request message for + [SecuritySettings.DeleteSecuritySettings][]. + name (:class:`str`): + Required. The name of the + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + to delete. Format: + ``projects//locations//securitySettings/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = security_settings.DeleteSecuritySettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_security_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SecuritySettingsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SecuritySettingsServiceAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py new file mode 100644 index 000000000000..28cd58445460 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py @@ -0,0 +1,1365 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.security_settings_service import pagers +from google.cloud.dialogflowcx_v3.types import ( + security_settings as gcdc_security_settings, +) +from google.cloud.dialogflowcx_v3.types import security_settings + +from .transports.base import DEFAULT_CLIENT_INFO, SecuritySettingsServiceTransport +from .transports.grpc import SecuritySettingsServiceGrpcTransport +from .transports.grpc_asyncio import SecuritySettingsServiceGrpcAsyncIOTransport +from .transports.rest import SecuritySettingsServiceRestTransport + + +class SecuritySettingsServiceClientMeta(type): + """Metaclass for the SecuritySettingsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SecuritySettingsServiceTransport]] + _transport_registry["grpc"] = SecuritySettingsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SecuritySettingsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SecuritySettingsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SecuritySettingsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SecuritySettingsServiceClient(metaclass=SecuritySettingsServiceClientMeta): + """Service for managing security settings for Dialogflow.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SecuritySettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SecuritySettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SecuritySettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SecuritySettingsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def deidentify_template_path( + organization: str, + location: str, + deidentify_template: str, + ) -> str: + """Returns a fully-qualified deidentify_template string.""" + return "organizations/{organization}/locations/{location}/deidentifyTemplates/{deidentify_template}".format( + organization=organization, + location=location, + deidentify_template=deidentify_template, + ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str, str]: + """Parses a deidentify_template path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/deidentifyTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def inspect_template_path( + organization: str, + location: str, + inspect_template: str, + ) -> str: + """Returns a fully-qualified inspect_template string.""" + return "organizations/{organization}/locations/{location}/inspectTemplates/{inspect_template}".format( + organization=organization, + location=location, + inspect_template=inspect_template, + ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str, str]: + """Parses a inspect_template path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/inspectTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def security_settings_path( + project: str, + location: str, + security_settings: str, + ) -> str: + """Returns a fully-qualified security_settings string.""" + return "projects/{project}/locations/{location}/securitySettings/{security_settings}".format( + project=project, + location=location, + security_settings=security_settings, + ) + + @staticmethod + def parse_security_settings_path(path: str) -> Dict[str, str]: + """Parses a security_settings path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/securitySettings/(?P.+?)$", + path, + ) + 
return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + 
location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SecuritySettingsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the security settings service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, SecuritySettingsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SecuritySettingsServiceTransport): + # transport is a SecuritySettingsServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_security_settings( + self, + request: Optional[ + Union[gcdc_security_settings.CreateSecuritySettingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + security_settings: Optional[gcdc_security_settings.SecuritySettings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_security_settings.SecuritySettings: + r"""Create security settings in the specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceClient() + + # Initialize request argument(s) + security_settings = dialogflowcx_v3.SecuritySettings() + security_settings.retention_window_days = 2271 + security_settings.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateSecuritySettingsRequest( + parent="parent_value", + security_settings=security_settings, + ) + + # Make the request + response = client.create_security_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateSecuritySettingsRequest, dict]): + The request object. The request message for + [SecuritySettings.CreateSecuritySettings][]. + parent (str): + Required. The location to create an + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + security_settings (google.cloud.dialogflowcx_v3.types.SecuritySettings): + Required. The security settings to + create. + + This corresponds to the ``security_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. 
It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, security_settings]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_security_settings.CreateSecuritySettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, gcdc_security_settings.CreateSecuritySettingsRequest + ): + request = gcdc_security_settings.CreateSecuritySettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if security_settings is not None: + request.security_settings = security_settings + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_security_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_security_settings( + self, + request: Optional[ + Union[security_settings.GetSecuritySettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_settings.SecuritySettings: + r"""Retrieves the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + The returned settings may be stale by up to 1 minute. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetSecuritySettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_security_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetSecuritySettingsRequest, dict]): + The request object. The request message for + [SecuritySettingsService.GetSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.GetSecuritySettings]. + name (str): + Required. Resource name of the settings. Format: + ``projects//locations//securitySettings/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a security_settings.GetSecuritySettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, security_settings.GetSecuritySettingsRequest): + request = security_settings.GetSecuritySettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_security_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_security_settings( + self, + request: Optional[ + Union[gcdc_security_settings.UpdateSecuritySettingsRequest, dict] + ] = None, + *, + security_settings: Optional[gcdc_security_settings.SecuritySettings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_security_settings.SecuritySettings: + r"""Updates the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceClient() + + # Initialize request argument(s) + security_settings = dialogflowcx_v3.SecuritySettings() + security_settings.retention_window_days = 2271 + security_settings.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateSecuritySettingsRequest( + security_settings=security_settings, + ) + + # Make the request + response = client.update_security_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateSecuritySettingsRequest, dict]): + The request object. The request message for + [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.UpdateSecuritySettings]. + security_settings (google.cloud.dialogflowcx_v3.types.SecuritySettings): + Required. 
[SecuritySettings] object that contains values + for each of the fields to update. + + This corresponds to the ``security_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to control which + fields get updated. If the mask is not + present, all fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([security_settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_security_settings.UpdateSecuritySettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, gcdc_security_settings.UpdateSecuritySettingsRequest + ): + request = gcdc_security_settings.UpdateSecuritySettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if security_settings is not None: + request.security_settings = security_settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_security_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("security_settings.name", request.security_settings.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_security_settings( + self, + request: Optional[ + Union[security_settings.ListSecuritySettingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSecuritySettingsPager: + r"""Returns the list of all security settings in the + specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListSecuritySettingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_security_settings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListSecuritySettingsRequest, dict]): + The request object. The request message for + [SecuritySettings.ListSecuritySettings][]. + parent (str): + Required. The location to list all security settings + for. Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.security_settings_service.pagers.ListSecuritySettingsPager: + The response message for + [SecuritySettings.ListSecuritySettings][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a security_settings.ListSecuritySettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, security_settings.ListSecuritySettingsRequest): + request = security_settings.ListSecuritySettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_security_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSecuritySettingsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_security_settings( + self, + request: Optional[ + Union[security_settings.DeleteSecuritySettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_security_settings(): + # Create a client + client = dialogflowcx_v3.SecuritySettingsServiceClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteSecuritySettingsRequest( + name="name_value", + ) + + # Make the request + client.delete_security_settings(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteSecuritySettingsRequest, dict]): + The request object. The request message for + [SecuritySettings.DeleteSecuritySettings][]. + name (str): + Required. The name of the + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + to delete. Format: + ``projects//locations//securitySettings/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a security_settings.DeleteSecuritySettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, security_settings.DeleteSecuritySettingsRequest): + request = security_settings.DeleteSecuritySettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_security_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "SecuritySettingsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SecuritySettingsServiceClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/pagers.py new file mode 100644 index 000000000000..87aa723bdbcc --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/pagers.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import security_settings + + +class ListSecuritySettingsPager: + """A pager for iterating through ``list_security_settings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListSecuritySettingsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``security_settings`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSecuritySettings`` requests and continue to iterate + through the ``security_settings`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListSecuritySettingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., security_settings.ListSecuritySettingsResponse], + request: security_settings.ListSecuritySettingsRequest, + response: security_settings.ListSecuritySettingsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListSecuritySettingsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListSecuritySettingsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = security_settings.ListSecuritySettingsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[security_settings.ListSecuritySettingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[security_settings.SecuritySettings]: + for page in self.pages: + yield from page.security_settings + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSecuritySettingsAsyncPager: + """A pager for iterating through ``list_security_settings`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListSecuritySettingsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``security_settings`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSecuritySettings`` requests and continue to iterate + through the ``security_settings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListSecuritySettingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[security_settings.ListSecuritySettingsResponse] + ], + request: security_settings.ListSecuritySettingsRequest, + response: security_settings.ListSecuritySettingsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListSecuritySettingsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListSecuritySettingsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = security_settings.ListSecuritySettingsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[security_settings.ListSecuritySettingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[security_settings.SecuritySettings]: + async def async_generator(): + async for page in self.pages: + for response in page.security_settings: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/__init__.py new file mode 100644 index 000000000000..78cee5bb3ba9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SecuritySettingsServiceTransport +from .grpc import SecuritySettingsServiceGrpcTransport +from .grpc_asyncio import SecuritySettingsServiceGrpcAsyncIOTransport +from .rest import ( + SecuritySettingsServiceRestInterceptor, + SecuritySettingsServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SecuritySettingsServiceTransport]] +_transport_registry["grpc"] = SecuritySettingsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SecuritySettingsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SecuritySettingsServiceRestTransport + +__all__ = ( + "SecuritySettingsServiceTransport", + "SecuritySettingsServiceGrpcTransport", + "SecuritySettingsServiceGrpcAsyncIOTransport", + "SecuritySettingsServiceRestTransport", + "SecuritySettingsServiceRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/base.py new file mode 100644 index 000000000000..dca4f8ad7bcc --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/base.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import ( + security_settings as gcdc_security_settings, +) +from google.cloud.dialogflowcx_v3.types import security_settings + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SecuritySettingsServiceTransport(abc.ABC): + """Abstract transport class for SecuritySettingsService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_security_settings: gapic_v1.method.wrap_method( + self.create_security_settings, + default_timeout=None, + client_info=client_info, + ), + self.get_security_settings: gapic_v1.method.wrap_method( + self.get_security_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_security_settings: gapic_v1.method.wrap_method( + self.update_security_settings, + default_timeout=None, + client_info=client_info, + ), + self.list_security_settings: gapic_v1.method.wrap_method( + self.list_security_settings, + default_timeout=None, + client_info=client_info, + ), + self.delete_security_settings: gapic_v1.method.wrap_method( + self.delete_security_settings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.CreateSecuritySettingsRequest], + Union[ + gcdc_security_settings.SecuritySettings, + Awaitable[gcdc_security_settings.SecuritySettings], + ], + ]: + raise NotImplementedError() + + @property + def get_security_settings( + self, + ) -> Callable[ + [security_settings.GetSecuritySettingsRequest], + Union[ + security_settings.SecuritySettings, + Awaitable[security_settings.SecuritySettings], + ], + ]: + raise NotImplementedError() + + @property + def update_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.UpdateSecuritySettingsRequest], + Union[ + gcdc_security_settings.SecuritySettings, + Awaitable[gcdc_security_settings.SecuritySettings], + ], + ]: + raise NotImplementedError() + + @property + def list_security_settings( + self, + ) -> Callable[ + [security_settings.ListSecuritySettingsRequest], + Union[ + security_settings.ListSecuritySettingsResponse, + Awaitable[security_settings.ListSecuritySettingsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_security_settings( + self, + ) -> Callable[ + [security_settings.DeleteSecuritySettingsRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + 
[locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SecuritySettingsServiceTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/grpc.py new file mode 100644 index 000000000000..25d37dc3461a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/grpc.py @@ -0,0 +1,481 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + security_settings as gcdc_security_settings, +) +from google.cloud.dialogflowcx_v3.types import security_settings + +from .base import DEFAULT_CLIENT_INFO, SecuritySettingsServiceTransport + + +class SecuritySettingsServiceGrpcTransport(SecuritySettingsServiceTransport): + """gRPC backend transport for SecuritySettingsService. + + Service for managing security settings for Dialogflow. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.CreateSecuritySettingsRequest], + gcdc_security_settings.SecuritySettings, + ]: + r"""Return a callable for the create security settings method over gRPC. + + Create security settings in the specified location. + + Returns: + Callable[[~.CreateSecuritySettingsRequest], + ~.SecuritySettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_security_settings" not in self._stubs: + self._stubs["create_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/CreateSecuritySettings", + request_serializer=gcdc_security_settings.CreateSecuritySettingsRequest.serialize, + response_deserializer=gcdc_security_settings.SecuritySettings.deserialize, + ) + return self._stubs["create_security_settings"] + + @property + def get_security_settings( + self, + ) -> Callable[ + [security_settings.GetSecuritySettingsRequest], + security_settings.SecuritySettings, + ]: + r"""Return a callable for the get security settings method over gRPC. + + Retrieves the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + The returned settings may be stale by up to 1 minute. + + Returns: + Callable[[~.GetSecuritySettingsRequest], + ~.SecuritySettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_security_settings" not in self._stubs: + self._stubs["get_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/GetSecuritySettings", + request_serializer=security_settings.GetSecuritySettingsRequest.serialize, + response_deserializer=security_settings.SecuritySettings.deserialize, + ) + return self._stubs["get_security_settings"] + + @property + def update_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.UpdateSecuritySettingsRequest], + gcdc_security_settings.SecuritySettings, + ]: + r"""Return a callable for the update security settings method over gRPC. + + Updates the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. 
+ + Returns: + Callable[[~.UpdateSecuritySettingsRequest], + ~.SecuritySettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_settings" not in self._stubs: + self._stubs["update_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/UpdateSecuritySettings", + request_serializer=gcdc_security_settings.UpdateSecuritySettingsRequest.serialize, + response_deserializer=gcdc_security_settings.SecuritySettings.deserialize, + ) + return self._stubs["update_security_settings"] + + @property + def list_security_settings( + self, + ) -> Callable[ + [security_settings.ListSecuritySettingsRequest], + security_settings.ListSecuritySettingsResponse, + ]: + r"""Return a callable for the list security settings method over gRPC. + + Returns the list of all security settings in the + specified location. + + Returns: + Callable[[~.ListSecuritySettingsRequest], + ~.ListSecuritySettingsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_security_settings" not in self._stubs: + self._stubs["list_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/ListSecuritySettings", + request_serializer=security_settings.ListSecuritySettingsRequest.serialize, + response_deserializer=security_settings.ListSecuritySettingsResponse.deserialize, + ) + return self._stubs["list_security_settings"] + + @property + def delete_security_settings( + self, + ) -> Callable[[security_settings.DeleteSecuritySettingsRequest], empty_pb2.Empty]: + r"""Return a callable for the delete security settings method over gRPC. + + Deletes the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + Returns: + Callable[[~.DeleteSecuritySettingsRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_security_settings" not in self._stubs: + self._stubs["delete_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/DeleteSecuritySettings", + request_serializer=security_settings.DeleteSecuritySettingsRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_security_settings"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SecuritySettingsServiceGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..fbb4b5d24692 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/grpc_asyncio.py @@ -0,0 +1,482 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + security_settings as gcdc_security_settings, +) +from google.cloud.dialogflowcx_v3.types import security_settings + +from .base import DEFAULT_CLIENT_INFO, SecuritySettingsServiceTransport +from .grpc import SecuritySettingsServiceGrpcTransport + + +class SecuritySettingsServiceGrpcAsyncIOTransport(SecuritySettingsServiceTransport): + """gRPC AsyncIO backend transport for SecuritySettingsService. + + Service for managing security settings for Dialogflow. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.CreateSecuritySettingsRequest], + Awaitable[gcdc_security_settings.SecuritySettings], + ]: + r"""Return a callable for the create security settings method over gRPC. 
+ + Create security settings in the specified location. + + Returns: + Callable[[~.CreateSecuritySettingsRequest], + Awaitable[~.SecuritySettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_security_settings" not in self._stubs: + self._stubs["create_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/CreateSecuritySettings", + request_serializer=gcdc_security_settings.CreateSecuritySettingsRequest.serialize, + response_deserializer=gcdc_security_settings.SecuritySettings.deserialize, + ) + return self._stubs["create_security_settings"] + + @property + def get_security_settings( + self, + ) -> Callable[ + [security_settings.GetSecuritySettingsRequest], + Awaitable[security_settings.SecuritySettings], + ]: + r"""Return a callable for the get security settings method over gRPC. + + Retrieves the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + The returned settings may be stale by up to 1 minute. + + Returns: + Callable[[~.GetSecuritySettingsRequest], + Awaitable[~.SecuritySettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_security_settings" not in self._stubs: + self._stubs["get_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/GetSecuritySettings", + request_serializer=security_settings.GetSecuritySettingsRequest.serialize, + response_deserializer=security_settings.SecuritySettings.deserialize, + ) + return self._stubs["get_security_settings"] + + @property + def update_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.UpdateSecuritySettingsRequest], + Awaitable[gcdc_security_settings.SecuritySettings], + ]: + r"""Return a callable for the update security settings method over gRPC. + + Updates the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + Returns: + Callable[[~.UpdateSecuritySettingsRequest], + Awaitable[~.SecuritySettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_security_settings" not in self._stubs: + self._stubs["update_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/UpdateSecuritySettings", + request_serializer=gcdc_security_settings.UpdateSecuritySettingsRequest.serialize, + response_deserializer=gcdc_security_settings.SecuritySettings.deserialize, + ) + return self._stubs["update_security_settings"] + + @property + def list_security_settings( + self, + ) -> Callable[ + [security_settings.ListSecuritySettingsRequest], + Awaitable[security_settings.ListSecuritySettingsResponse], + ]: + r"""Return a callable for the list security settings method over gRPC. + + Returns the list of all security settings in the + specified location. 
+ + Returns: + Callable[[~.ListSecuritySettingsRequest], + Awaitable[~.ListSecuritySettingsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_security_settings" not in self._stubs: + self._stubs["list_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/ListSecuritySettings", + request_serializer=security_settings.ListSecuritySettingsRequest.serialize, + response_deserializer=security_settings.ListSecuritySettingsResponse.deserialize, + ) + return self._stubs["list_security_settings"] + + @property + def delete_security_settings( + self, + ) -> Callable[ + [security_settings.DeleteSecuritySettingsRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete security settings method over gRPC. + + Deletes the specified + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings]. + + Returns: + Callable[[~.DeleteSecuritySettingsRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_security_settings" not in self._stubs: + self._stubs["delete_security_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SecuritySettingsService/DeleteSecuritySettings", + request_serializer=security_settings.DeleteSecuritySettingsRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_security_settings"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("SecuritySettingsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py new file mode 100644 index 000000000000..a178f291b1db --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py @@ -0,0 +1,1317 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + security_settings as gcdc_security_settings, +) +from google.cloud.dialogflowcx_v3.types import security_settings + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SecuritySettingsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SecuritySettingsServiceRestInterceptor: + 
"""Interceptor for SecuritySettingsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SecuritySettingsServiceRestTransport. + + .. code-block:: python + class MyCustomSecuritySettingsServiceInterceptor(SecuritySettingsServiceRestInterceptor): + def pre_create_security_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_security_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_security_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_security_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_security_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_security_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_security_settings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_security_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_security_settings(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SecuritySettingsServiceRestTransport(interceptor=MyCustomSecuritySettingsServiceInterceptor()) + client = SecuritySettingsServiceClient(transport=transport) + + + """ + + def pre_create_security_settings( + self, + request: 
gcdc_security_settings.CreateSecuritySettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcdc_security_settings.CreateSecuritySettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_security_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def post_create_security_settings( + self, response: gcdc_security_settings.SecuritySettings + ) -> gcdc_security_settings.SecuritySettings: + """Post-rpc interceptor for create_security_settings + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_delete_security_settings( + self, + request: security_settings.DeleteSecuritySettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + security_settings.DeleteSecuritySettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_security_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def pre_get_security_settings( + self, + request: security_settings.GetSecuritySettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[security_settings.GetSecuritySettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_security_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. 
+ """ + return request, metadata + + def post_get_security_settings( + self, response: security_settings.SecuritySettings + ) -> security_settings.SecuritySettings: + """Post-rpc interceptor for get_security_settings + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_list_security_settings( + self, + request: security_settings.ListSecuritySettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + security_settings.ListSecuritySettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_security_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def post_list_security_settings( + self, response: security_settings.ListSecuritySettingsResponse + ) -> security_settings.ListSecuritySettingsResponse: + """Post-rpc interceptor for list_security_settings + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_update_security_settings( + self, + request: gcdc_security_settings.UpdateSecuritySettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcdc_security_settings.UpdateSecuritySettingsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_security_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. 
+ """ + return request, metadata + + def post_update_security_settings( + self, response: gcdc_security_settings.SecuritySettings + ) -> gcdc_security_settings.SecuritySettings: + """Post-rpc interceptor for update_security_settings + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SecuritySettingsService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SecuritySettingsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SecuritySettingsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SecuritySettingsServiceRestInterceptor + + +class SecuritySettingsServiceRestTransport(SecuritySettingsServiceTransport): + """REST backend transport for SecuritySettingsService. + + Service for managing security settings for Dialogflow. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SecuritySettingsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SecuritySettingsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateSecuritySettings(SecuritySettingsServiceRestStub): + def __hash__(self): + return hash("CreateSecuritySettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_security_settings.CreateSecuritySettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_security_settings.SecuritySettings: + r"""Call the create security settings method over HTTP. + + Args: + request (~.gcdc_security_settings.CreateSecuritySettingsRequest): + The request object. The request message for + [SecuritySettings.CreateSecuritySettings][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_security_settings.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*}/securitySettings", + "body": "security_settings", + }, + ] + request, metadata = self._interceptor.pre_create_security_settings( + request, metadata + ) + pb_request = gcdc_security_settings.CreateSecuritySettingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_security_settings.SecuritySettings() + pb_resp = gcdc_security_settings.SecuritySettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_security_settings(resp) + return resp + + class _DeleteSecuritySettings(SecuritySettingsServiceRestStub): + def __hash__(self): + return hash("DeleteSecuritySettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: security_settings.DeleteSecuritySettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete security settings method over HTTP. + + Args: + request (~.security_settings.DeleteSecuritySettingsRequest): + The request object. The request message for + [SecuritySettings.DeleteSecuritySettings][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/securitySettings/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_security_settings( + request, metadata + ) + pb_request = security_settings.DeleteSecuritySettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetSecuritySettings(SecuritySettingsServiceRestStub): + def __hash__(self): + return hash("GetSecuritySettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: security_settings.GetSecuritySettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_settings.SecuritySettings: + r"""Call the get security settings method over HTTP. 
+ + Args: + request (~.security_settings.GetSecuritySettingsRequest): + The request object. The request message for + [SecuritySettingsService.GetSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.GetSecuritySettings]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.security_settings.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/securitySettings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_security_settings( + request, metadata + ) + pb_request = security_settings.GetSecuritySettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_settings.SecuritySettings() + pb_resp = security_settings.SecuritySettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_security_settings(resp) + return resp + + class _ListSecuritySettings(SecuritySettingsServiceRestStub): + def __hash__(self): + return hash("ListSecuritySettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: security_settings.ListSecuritySettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> security_settings.ListSecuritySettingsResponse: + r"""Call the list security settings method over HTTP. + + Args: + request (~.security_settings.ListSecuritySettingsRequest): + The request object. The request message for + [SecuritySettings.ListSecuritySettings][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.security_settings.ListSecuritySettingsResponse: + The response message for + [SecuritySettings.ListSecuritySettings][]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*}/securitySettings", + }, + ] + request, metadata = self._interceptor.pre_list_security_settings( + request, metadata + ) + pb_request = security_settings.ListSecuritySettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = security_settings.ListSecuritySettingsResponse() + pb_resp = security_settings.ListSecuritySettingsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_security_settings(resp) + return resp + + class _UpdateSecuritySettings(SecuritySettingsServiceRestStub): + def __hash__(self): + return hash("UpdateSecuritySettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_security_settings.UpdateSecuritySettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_security_settings.SecuritySettings: + r"""Call the update security settings method over HTTP. + + Args: + request (~.gcdc_security_settings.UpdateSecuritySettingsRequest): + The request object. The request message for + [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.UpdateSecuritySettings]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_security_settings.SecuritySettings: + Represents the settings related to + security issues, such as data redaction + and data retention. It may take hours + for updates on the settings to propagate + to all the related components and take + effect. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{security_settings.name=projects/*/locations/*/securitySettings/*}", + "body": "security_settings", + }, + ] + request, metadata = self._interceptor.pre_update_security_settings( + request, metadata + ) + pb_request = gcdc_security_settings.UpdateSecuritySettingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_security_settings.SecuritySettings() + pb_resp = gcdc_security_settings.SecuritySettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_security_settings(resp) + return resp + + @property + def create_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.CreateSecuritySettingsRequest], + gcdc_security_settings.SecuritySettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSecuritySettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_security_settings( + self, + ) -> Callable[[security_settings.DeleteSecuritySettingsRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSecuritySettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_security_settings( + self, + ) -> Callable[ + [security_settings.GetSecuritySettingsRequest], + security_settings.SecuritySettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSecuritySettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_security_settings( + self, + ) -> Callable[ + [security_settings.ListSecuritySettingsRequest], + security_settings.ListSecuritySettingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSecuritySettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_security_settings( + self, + ) -> Callable[ + [gcdc_security_settings.UpdateSecuritySettingsRequest], + gcdc_security_settings.SecuritySettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSecuritySettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(SecuritySettingsServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(SecuritySettingsServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SecuritySettingsServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SecuritySettingsServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SecuritySettingsServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SecuritySettingsServiceRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/__init__.py new file mode 100644 index 000000000000..d6b278bbd03d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SessionEntityTypesAsyncClient +from .client import SessionEntityTypesClient + +__all__ = ( + "SessionEntityTypesClient", + "SessionEntityTypesAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py new file mode 100644 index 000000000000..47658913586c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/async_client.py @@ -0,0 +1,1140 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.session_entity_types import pagers +from google.cloud.dialogflowcx_v3.types import ( + session_entity_type as gcdc_session_entity_type, +) +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import session_entity_type + +from .client import SessionEntityTypesClient +from .transports.base import DEFAULT_CLIENT_INFO, SessionEntityTypesTransport +from .transports.grpc_asyncio import SessionEntityTypesGrpcAsyncIOTransport + + +class SessionEntityTypesAsyncClient: + """Service for managing + [SessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityType]. 
+ """ + + _client: SessionEntityTypesClient + + DEFAULT_ENDPOINT = SessionEntityTypesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SessionEntityTypesClient.DEFAULT_MTLS_ENDPOINT + + session_entity_type_path = staticmethod( + SessionEntityTypesClient.session_entity_type_path + ) + parse_session_entity_type_path = staticmethod( + SessionEntityTypesClient.parse_session_entity_type_path + ) + common_billing_account_path = staticmethod( + SessionEntityTypesClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SessionEntityTypesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SessionEntityTypesClient.common_folder_path) + parse_common_folder_path = staticmethod( + SessionEntityTypesClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SessionEntityTypesClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SessionEntityTypesClient.parse_common_organization_path + ) + common_project_path = staticmethod(SessionEntityTypesClient.common_project_path) + parse_common_project_path = staticmethod( + SessionEntityTypesClient.parse_common_project_path + ) + common_location_path = staticmethod(SessionEntityTypesClient.common_location_path) + parse_common_location_path = staticmethod( + SessionEntityTypesClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionEntityTypesAsyncClient: The constructed client. 
+ """ + return SessionEntityTypesClient.from_service_account_info.__func__(SessionEntityTypesAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionEntityTypesAsyncClient: The constructed client. + """ + return SessionEntityTypesClient.from_service_account_file.__func__(SessionEntityTypesAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SessionEntityTypesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SessionEntityTypesTransport: + """Returns the transport used by the client instance. + + Returns: + SessionEntityTypesTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(SessionEntityTypesClient).get_transport_class, + type(SessionEntityTypesClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SessionEntityTypesTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the session entity types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SessionEntityTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SessionEntityTypesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_session_entity_types( + self, + request: Optional[ + Union[session_entity_type.ListSessionEntityTypesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionEntityTypesAsyncPager: + r"""Returns the list of all session entity types in the + specified session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_session_entity_types(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListSessionEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_session_entity_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesRequest, dict]]): + The request object. The request message for + [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityTypes.ListSessionEntityTypes]. + parent (:class:`str`): + Required. The session to list all session entity types + from. Format: + ``projects//locations//agents//sessions/`` + or + ``projects//locations//agents//environments//sessions/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.session_entity_types.pagers.ListSessionEntityTypesAsyncPager: + The response message for + [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityTypes.ListSessionEntityTypes]. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = session_entity_type.ListSessionEntityTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_session_entity_types, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSessionEntityTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_session_entity_type( + self, + request: Optional[ + Union[session_entity_type.GetSessionEntityTypeRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session_entity_type.SessionEntityType: + r"""Retrieves the specified session entity type. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetSessionEntityTypeRequest, dict]]): + The request object. The request message for + [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.GetSessionEntityType]. + name (:class:`str`): + Required. The name of the session entity type. Format: + ``projects//locations//agents//sessions//entityTypes/`` + or + ``projects//locations//agents//environments//sessions//entityTypes/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.SessionEntityType: + Session entity types are referred to as **User** entity types and are + entities that are built for an individual user such + as favorites, preferences, playlists, and so on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at + the user session level (we refer to the entity types + defined at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = session_entity_type.GetSessionEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_session_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_session_entity_type( + self, + request: Optional[ + Union[gcdc_session_entity_type.CreateSessionEntityTypeRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + session_entity_type: Optional[ + gcdc_session_entity_type.SessionEntityType + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_session_entity_type.SessionEntityType: + r"""Creates a session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + session_entity_type = dialogflowcx_v3.SessionEntityType() + session_entity_type.name = "name_value" + session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT" + session_entity_type.entities.value = "value_value" + session_entity_type.entities.synonyms = ['synonyms_value1', 'synonyms_value2'] + + request = dialogflowcx_v3.CreateSessionEntityTypeRequest( + parent="parent_value", + session_entity_type=session_entity_type, + ) + + # Make the request + response = await client.create_session_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateSessionEntityTypeRequest, dict]]): + The request object. 
The request message for + [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.CreateSessionEntityType]. + parent (:class:`str`): + Required. The session to create a session entity type + for. Format: + ``projects//locations//agents//sessions/`` + or + ``projects//locations//agents//environments//sessions/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_entity_type (:class:`google.cloud.dialogflowcx_v3.types.SessionEntityType`): + Required. The session entity type to + create. + + This corresponds to the ``session_entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SessionEntityType: + Session entity types are referred to as **User** entity types and are + entities that are built for an individual user such + as favorites, preferences, playlists, and so on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at + the user session level (we refer to the entity types + defined at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, session_entity_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_session_entity_type.CreateSessionEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if session_entity_type is not None: + request.session_entity_type = session_entity_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_session_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_session_entity_type( + self, + request: Optional[ + Union[gcdc_session_entity_type.UpdateSessionEntityTypeRequest, dict] + ] = None, + *, + session_entity_type: Optional[ + gcdc_session_entity_type.SessionEntityType + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_session_entity_type.SessionEntityType: + r"""Updates the specified session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + session_entity_type = dialogflowcx_v3.SessionEntityType() + session_entity_type.name = "name_value" + session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT" + session_entity_type.entities.value = "value_value" + session_entity_type.entities.synonyms = ['synonyms_value1', 'synonyms_value2'] + + request = dialogflowcx_v3.UpdateSessionEntityTypeRequest( + session_entity_type=session_entity_type, + ) + + # Make the request + response = await client.update_session_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateSessionEntityTypeRequest, dict]]): + The request object. The request message for + [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.UpdateSessionEntityType]. + session_entity_type (:class:`google.cloud.dialogflowcx_v3.types.SessionEntityType`): + Required. The session entity type to update. Format: + ``projects//locations//agents//sessions//entityTypes/`` + or + ``projects//locations//agents//environments//sessions//entityTypes/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``session_entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SessionEntityType: + Session entity types are referred to as **User** entity types and are + entities that are built for an individual user such + as favorites, preferences, playlists, and so on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at + the user session level (we refer to the entity types + defined at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session_entity_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_session_entity_type.UpdateSessionEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session_entity_type is not None: + request.session_entity_type = session_entity_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_session_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("session_entity_type.name", request.session_entity_type.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_session_entity_type( + self, + request: Optional[ + Union[session_entity_type.DeleteSessionEntityTypeRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_session_entity_type(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteSessionEntityTypeRequest, dict]]): + The request object. 
The request message for + [SessionEntityTypes.DeleteSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.DeleteSessionEntityType]. + name (:class:`str`): + Required. The name of the session entity type to delete. + Format: + ``projects//locations//agents//sessions//entityTypes/`` + or + ``projects//locations//agents//environments//sessions//entityTypes/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = session_entity_type.DeleteSessionEntityTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_session_entity_type, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SessionEntityTypesAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SessionEntityTypesAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py new file mode 100644 index 000000000000..f55d514cb157 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py @@ -0,0 +1,1387 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.session_entity_types import pagers +from google.cloud.dialogflowcx_v3.types import ( + session_entity_type as gcdc_session_entity_type, +) +from google.cloud.dialogflowcx_v3.types import entity_type +from google.cloud.dialogflowcx_v3.types import session_entity_type + +from .transports.base import DEFAULT_CLIENT_INFO, SessionEntityTypesTransport +from .transports.grpc import SessionEntityTypesGrpcTransport +from .transports.grpc_asyncio import SessionEntityTypesGrpcAsyncIOTransport +from .transports.rest import SessionEntityTypesRestTransport + + +class SessionEntityTypesClientMeta(type): + """Metaclass for the SessionEntityTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SessionEntityTypesTransport]] + _transport_registry["grpc"] = SessionEntityTypesGrpcTransport + _transport_registry["grpc_asyncio"] = SessionEntityTypesGrpcAsyncIOTransport + _transport_registry["rest"] = SessionEntityTypesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SessionEntityTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SessionEntityTypesClient(metaclass=SessionEntityTypesClientMeta): + """Service for managing + [SessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityType]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionEntityTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionEntityTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SessionEntityTypesTransport: + """Returns the transport used by the client instance. + + Returns: + SessionEntityTypesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def session_entity_type_path( + project: str, + location: str, + agent: str, + session: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified session_entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/sessions/{session}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + session=session, + entity_type=entity_type, + ) + + @staticmethod + def parse_session_entity_type_path(path: str) -> Dict[str, str]: + """Parses a session_entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/sessions/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its 
component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SessionEntityTypesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the session entity types client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SessionEntityTypesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SessionEntityTypesTransport): + # transport is a SessionEntityTypesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_session_entity_types( + self, + request: Optional[ + Union[session_entity_type.ListSessionEntityTypesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionEntityTypesPager: + r"""Returns the list of all session entity types in the + specified session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_session_entity_types(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListSessionEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_session_entity_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesRequest, dict]): + The request object. The request message for + [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityTypes.ListSessionEntityTypes]. + parent (str): + Required. The session to list all session entity types + from. Format: + ``projects//locations//agents//sessions/`` + or + ``projects//locations//agents//environments//sessions/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.session_entity_types.pagers.ListSessionEntityTypesPager: + The response message for + [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityTypes.ListSessionEntityTypes]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a session_entity_type.ListSessionEntityTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, session_entity_type.ListSessionEntityTypesRequest): + request = session_entity_type.ListSessionEntityTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_session_entity_types + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionEntityTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_session_entity_type( + self, + request: Optional[ + Union[session_entity_type.GetSessionEntityTypeRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session_entity_type.SessionEntityType: + r"""Retrieves the specified session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_session_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetSessionEntityTypeRequest, dict]): + The request object. The request message for + [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.GetSessionEntityType]. + name (str): + Required. The name of the session entity type. Format: + ``projects//locations//agents//sessions//entityTypes/`` + or + ``projects//locations//agents//environments//sessions//entityTypes/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SessionEntityType: + Session entity types are referred to as **User** entity types and are + entities that are built for an individual user such + as favorites, preferences, playlists, and so on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at + the user session level (we refer to the entity types + defined at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a session_entity_type.GetSessionEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, session_entity_type.GetSessionEntityTypeRequest): + request = session_entity_type.GetSessionEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_session_entity_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_session_entity_type( + self, + request: Optional[ + Union[gcdc_session_entity_type.CreateSessionEntityTypeRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + session_entity_type: Optional[ + gcdc_session_entity_type.SessionEntityType + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_session_entity_type.SessionEntityType: + r"""Creates a session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesClient() + + # Initialize request argument(s) + session_entity_type = dialogflowcx_v3.SessionEntityType() + session_entity_type.name = "name_value" + session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT" + session_entity_type.entities.value = "value_value" + session_entity_type.entities.synonyms = ['synonyms_value1', 'synonyms_value2'] + + request = dialogflowcx_v3.CreateSessionEntityTypeRequest( + parent="parent_value", + session_entity_type=session_entity_type, + ) + + # Make the request + response = client.create_session_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateSessionEntityTypeRequest, dict]): + The request object. The request message for + [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.CreateSessionEntityType]. + parent (str): + Required. The session to create a session entity type + for. Format: + ``projects//locations//agents//sessions/`` + or + ``projects//locations//agents//environments//sessions/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_entity_type (google.cloud.dialogflowcx_v3.types.SessionEntityType): + Required. The session entity type to + create. + + This corresponds to the ``session_entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SessionEntityType: + Session entity types are referred to as **User** entity types and are + entities that are built for an individual user such + as favorites, preferences, playlists, and so on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at + the user session level (we refer to the entity types + defined at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, session_entity_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_session_entity_type.CreateSessionEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, gcdc_session_entity_type.CreateSessionEntityTypeRequest + ): + request = gcdc_session_entity_type.CreateSessionEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if session_entity_type is not None: + request.session_entity_type = session_entity_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_session_entity_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_session_entity_type( + self, + request: Optional[ + Union[gcdc_session_entity_type.UpdateSessionEntityTypeRequest, dict] + ] = None, + *, + session_entity_type: Optional[ + gcdc_session_entity_type.SessionEntityType + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_session_entity_type.SessionEntityType: + r"""Updates the specified session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesClient() + + # Initialize request argument(s) + session_entity_type = dialogflowcx_v3.SessionEntityType() + session_entity_type.name = "name_value" + session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT" + session_entity_type.entities.value = "value_value" + session_entity_type.entities.synonyms = ['synonyms_value1', 'synonyms_value2'] + + request = dialogflowcx_v3.UpdateSessionEntityTypeRequest( + session_entity_type=session_entity_type, + ) + + # Make the request + response = client.update_session_entity_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateSessionEntityTypeRequest, dict]): + The request object. The request message for + [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.UpdateSessionEntityType]. + session_entity_type (google.cloud.dialogflowcx_v3.types.SessionEntityType): + Required. The session entity type to update. Format: + ``projects//locations//agents//sessions//entityTypes/`` + or + ``projects//locations//agents//environments//sessions//entityTypes/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``session_entity_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.SessionEntityType: + Session entity types are referred to as **User** entity types and are + entities that are built for an individual user such + as favorites, preferences, playlists, and so on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at + the user session level (we refer to the entity types + defined at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + [Dialogflow + documentation](\ https://cloud.google.com/dialogflow/docs/entities-overview). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session_entity_type, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_session_entity_type.UpdateSessionEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, gcdc_session_entity_type.UpdateSessionEntityTypeRequest + ): + request = gcdc_session_entity_type.UpdateSessionEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if session_entity_type is not None: + request.session_entity_type = session_entity_type + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_session_entity_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("session_entity_type.name", request.session_entity_type.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_session_entity_type( + self, + request: Optional[ + Union[session_entity_type.DeleteSessionEntityTypeRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified session entity type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_session_entity_type(): + # Create a client + client = dialogflowcx_v3.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_session_entity_type(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteSessionEntityTypeRequest, dict]): + The request object. The request message for + [SessionEntityTypes.DeleteSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.DeleteSessionEntityType]. + name (str): + Required. The name of the session entity type to delete. + Format: + ``projects//locations//agents//sessions//entityTypes/`` + or + ``projects//locations//agents//environments//sessions//entityTypes/``. + If ``Environment ID`` is not specified, we assume + default 'draft' environment. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a session_entity_type.DeleteSessionEntityTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, session_entity_type.DeleteSessionEntityTypeRequest): + request = session_entity_type.DeleteSessionEntityTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_session_entity_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "SessionEntityTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SessionEntityTypesClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/pagers.py new file mode 100644 index 000000000000..d74a709ff47b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/pagers.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import session_entity_type + + +class ListSessionEntityTypesPager: + """A pager for iterating through ``list_session_entity_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``session_entity_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessionEntityTypes`` requests and continue to iterate + through the ``session_entity_types`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., session_entity_type.ListSessionEntityTypesResponse], + request: session_entity_type.ListSessionEntityTypesRequest, + response: session_entity_type.ListSessionEntityTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = session_entity_type.ListSessionEntityTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[session_entity_type.ListSessionEntityTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[session_entity_type.SessionEntityType]: + for page in self.pages: + yield from page.session_entity_types + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSessionEntityTypesAsyncPager: + """A pager for iterating through ``list_session_entity_types`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``session_entity_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessionEntityTypes`` requests and continue to iterate + through the ``session_entity_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[session_entity_type.ListSessionEntityTypesResponse] + ], + request: session_entity_type.ListSessionEntityTypesRequest, + response: session_entity_type.ListSessionEntityTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListSessionEntityTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = session_entity_type.ListSessionEntityTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[session_entity_type.ListSessionEntityTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[session_entity_type.SessionEntityType]: + async def async_generator(): + async for page in self.pages: + for response in page.session_entity_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/__init__.py new file mode 100644 index 000000000000..55b8ec8ec1a3 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SessionEntityTypesTransport +from .grpc import SessionEntityTypesGrpcTransport +from .grpc_asyncio import SessionEntityTypesGrpcAsyncIOTransport +from .rest import SessionEntityTypesRestInterceptor, SessionEntityTypesRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SessionEntityTypesTransport]] +_transport_registry["grpc"] = SessionEntityTypesGrpcTransport +_transport_registry["grpc_asyncio"] = SessionEntityTypesGrpcAsyncIOTransport +_transport_registry["rest"] = SessionEntityTypesRestTransport + +__all__ = ( + "SessionEntityTypesTransport", + "SessionEntityTypesGrpcTransport", + "SessionEntityTypesGrpcAsyncIOTransport", + "SessionEntityTypesRestTransport", + "SessionEntityTypesRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/base.py new file mode 100644 index 000000000000..ab718a98e9df --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/base.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import ( + session_entity_type as gcdc_session_entity_type, +) +from google.cloud.dialogflowcx_v3.types import session_entity_type + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SessionEntityTypesTransport(abc.ABC): + """Abstract transport class for SessionEntityTypes.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_session_entity_types: gapic_v1.method.wrap_method( + self.list_session_entity_types, + default_timeout=None, + client_info=client_info, + ), + self.get_session_entity_type: gapic_v1.method.wrap_method( + self.get_session_entity_type, + default_timeout=None, + client_info=client_info, + ), + self.create_session_entity_type: gapic_v1.method.wrap_method( + self.create_session_entity_type, + default_timeout=None, + client_info=client_info, + ), + self.update_session_entity_type: gapic_v1.method.wrap_method( + self.update_session_entity_type, + default_timeout=None, + client_info=client_info, + ), + self.delete_session_entity_type: gapic_v1.method.wrap_method( + self.delete_session_entity_type, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_session_entity_types( + self, + ) -> Callable[ + [session_entity_type.ListSessionEntityTypesRequest], + Union[ + session_entity_type.ListSessionEntityTypesResponse, + Awaitable[session_entity_type.ListSessionEntityTypesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.GetSessionEntityTypeRequest], + Union[ + session_entity_type.SessionEntityType, + Awaitable[session_entity_type.SessionEntityType], + ], + ]: + raise NotImplementedError() + + @property + def create_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.CreateSessionEntityTypeRequest], + Union[ + gcdc_session_entity_type.SessionEntityType, + Awaitable[gcdc_session_entity_type.SessionEntityType], + ], + ]: + raise NotImplementedError() + + @property + def update_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.UpdateSessionEntityTypeRequest], + Union[ + gcdc_session_entity_type.SessionEntityType, + Awaitable[gcdc_session_entity_type.SessionEntityType], + ], + ]: + raise NotImplementedError() + + @property + def delete_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.DeleteSessionEntityTypeRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def 
get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SessionEntityTypesTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/grpc.py new file mode 100644 index 000000000000..0a2c774bd8fd --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/grpc.py @@ -0,0 +1,480 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + session_entity_type as gcdc_session_entity_type, +) +from google.cloud.dialogflowcx_v3.types import session_entity_type + +from .base import DEFAULT_CLIENT_INFO, SessionEntityTypesTransport + + +class SessionEntityTypesGrpcTransport(SessionEntityTypesTransport): + """gRPC backend transport for SessionEntityTypes. + + Service for managing + [SessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityType]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_session_entity_types( + self, + ) -> Callable[ + [session_entity_type.ListSessionEntityTypesRequest], + session_entity_type.ListSessionEntityTypesResponse, + ]: + r"""Return a callable for the list session entity types method over gRPC. + + Returns the list of all session entity types in the + specified session. + + Returns: + Callable[[~.ListSessionEntityTypesRequest], + ~.ListSessionEntityTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_session_entity_types" not in self._stubs: + self._stubs["list_session_entity_types"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/ListSessionEntityTypes", + request_serializer=session_entity_type.ListSessionEntityTypesRequest.serialize, + response_deserializer=session_entity_type.ListSessionEntityTypesResponse.deserialize, + ) + return self._stubs["list_session_entity_types"] + + @property + def get_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.GetSessionEntityTypeRequest], + session_entity_type.SessionEntityType, + ]: + r"""Return a callable for the get session entity type method over gRPC. + + Retrieves the specified session entity type. + + Returns: + Callable[[~.GetSessionEntityTypeRequest], + ~.SessionEntityType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_session_entity_type" not in self._stubs: + self._stubs["get_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/GetSessionEntityType", + request_serializer=session_entity_type.GetSessionEntityTypeRequest.serialize, + response_deserializer=session_entity_type.SessionEntityType.deserialize, + ) + return self._stubs["get_session_entity_type"] + + @property + def create_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.CreateSessionEntityTypeRequest], + gcdc_session_entity_type.SessionEntityType, + ]: + r"""Return a callable for the create session entity type method over gRPC. + + Creates a session entity type. + + Returns: + Callable[[~.CreateSessionEntityTypeRequest], + ~.SessionEntityType]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_session_entity_type" not in self._stubs: + self._stubs["create_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/CreateSessionEntityType", + request_serializer=gcdc_session_entity_type.CreateSessionEntityTypeRequest.serialize, + response_deserializer=gcdc_session_entity_type.SessionEntityType.deserialize, + ) + return self._stubs["create_session_entity_type"] + + @property + def update_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.UpdateSessionEntityTypeRequest], + gcdc_session_entity_type.SessionEntityType, + ]: + r"""Return a callable for the update session entity type method over gRPC. + + Updates the specified session entity type. + + Returns: + Callable[[~.UpdateSessionEntityTypeRequest], + ~.SessionEntityType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_session_entity_type" not in self._stubs: + self._stubs["update_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/UpdateSessionEntityType", + request_serializer=gcdc_session_entity_type.UpdateSessionEntityTypeRequest.serialize, + response_deserializer=gcdc_session_entity_type.SessionEntityType.deserialize, + ) + return self._stubs["update_session_entity_type"] + + @property + def delete_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.DeleteSessionEntityTypeRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete session entity type method over gRPC. 
+ + Deletes the specified session entity type. + + Returns: + Callable[[~.DeleteSessionEntityTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_session_entity_type" not in self._stubs: + self._stubs["delete_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/DeleteSessionEntityType", + request_serializer=session_entity_type.DeleteSessionEntityTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_session_entity_type"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SessionEntityTypesGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c35d14995ec3 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/grpc_asyncio.py @@ -0,0 +1,479 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + session_entity_type as gcdc_session_entity_type, +) +from google.cloud.dialogflowcx_v3.types import session_entity_type + +from .base import DEFAULT_CLIENT_INFO, SessionEntityTypesTransport +from .grpc import SessionEntityTypesGrpcTransport + + +class SessionEntityTypesGrpcAsyncIOTransport(SessionEntityTypesTransport): + """gRPC AsyncIO backend transport for SessionEntityTypes. + + Service for managing + [SessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityType]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def list_session_entity_types( + self, + ) -> Callable[ + [session_entity_type.ListSessionEntityTypesRequest], + Awaitable[session_entity_type.ListSessionEntityTypesResponse], + ]: + r"""Return a callable for the list session entity types method over gRPC. + + Returns the list of all session entity types in the + specified session. + + Returns: + Callable[[~.ListSessionEntityTypesRequest], + Awaitable[~.ListSessionEntityTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_session_entity_types" not in self._stubs: + self._stubs["list_session_entity_types"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/ListSessionEntityTypes", + request_serializer=session_entity_type.ListSessionEntityTypesRequest.serialize, + response_deserializer=session_entity_type.ListSessionEntityTypesResponse.deserialize, + ) + return self._stubs["list_session_entity_types"] + + @property + def get_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.GetSessionEntityTypeRequest], + Awaitable[session_entity_type.SessionEntityType], + ]: + r"""Return a callable for the get session entity type method over gRPC. + + Retrieves the specified session entity type. + + Returns: + Callable[[~.GetSessionEntityTypeRequest], + Awaitable[~.SessionEntityType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_session_entity_type" not in self._stubs: + self._stubs["get_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/GetSessionEntityType", + request_serializer=session_entity_type.GetSessionEntityTypeRequest.serialize, + response_deserializer=session_entity_type.SessionEntityType.deserialize, + ) + return self._stubs["get_session_entity_type"] + + @property + def create_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.CreateSessionEntityTypeRequest], + Awaitable[gcdc_session_entity_type.SessionEntityType], + ]: + r"""Return a callable for the create session entity type method over gRPC. + + Creates a session entity type. + + Returns: + Callable[[~.CreateSessionEntityTypeRequest], + Awaitable[~.SessionEntityType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_session_entity_type" not in self._stubs: + self._stubs["create_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/CreateSessionEntityType", + request_serializer=gcdc_session_entity_type.CreateSessionEntityTypeRequest.serialize, + response_deserializer=gcdc_session_entity_type.SessionEntityType.deserialize, + ) + return self._stubs["create_session_entity_type"] + + @property + def update_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.UpdateSessionEntityTypeRequest], + Awaitable[gcdc_session_entity_type.SessionEntityType], + ]: + r"""Return a callable for the update session entity type method over gRPC. + + Updates the specified session entity type. 
+ + Returns: + Callable[[~.UpdateSessionEntityTypeRequest], + Awaitable[~.SessionEntityType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_session_entity_type" not in self._stubs: + self._stubs["update_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/UpdateSessionEntityType", + request_serializer=gcdc_session_entity_type.UpdateSessionEntityTypeRequest.serialize, + response_deserializer=gcdc_session_entity_type.SessionEntityType.deserialize, + ) + return self._stubs["update_session_entity_type"] + + @property + def delete_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.DeleteSessionEntityTypeRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete session entity type method over gRPC. + + Deletes the specified session entity type. + + Returns: + Callable[[~.DeleteSessionEntityTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_session_entity_type" not in self._stubs: + self._stubs["delete_session_entity_type"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.SessionEntityTypes/DeleteSessionEntityType", + request_serializer=session_entity_type.DeleteSessionEntityTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_session_entity_type"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+
+__all__ = ("SessionEntityTypesGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py
new file mode 100644
index 000000000000..3886d98641d5
--- /dev/null
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py
@@ -0,0 +1,1380 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + session_entity_type as gcdc_session_entity_type, +) +from google.cloud.dialogflowcx_v3.types import session_entity_type + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SessionEntityTypesTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SessionEntityTypesRestInterceptor: + """Interceptor 
for SessionEntityTypes. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SessionEntityTypesRestTransport. + + .. code-block:: python + class MyCustomSessionEntityTypesInterceptor(SessionEntityTypesRestInterceptor): + def pre_create_session_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_session_entity_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_session_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_session_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_session_entity_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_session_entity_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_session_entity_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_session_entity_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_session_entity_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SessionEntityTypesRestTransport(interceptor=MyCustomSessionEntityTypesInterceptor()) + client = SessionEntityTypesClient(transport=transport) + + + """ + + def pre_create_session_entity_type( + self, + request: gcdc_session_entity_type.CreateSessionEntityTypeRequest, + 
metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcdc_session_entity_type.CreateSessionEntityTypeRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_session_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_create_session_entity_type( + self, response: gcdc_session_entity_type.SessionEntityType + ) -> gcdc_session_entity_type.SessionEntityType: + """Post-rpc interceptor for create_session_entity_type + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. + """ + return response + + def pre_delete_session_entity_type( + self, + request: session_entity_type.DeleteSessionEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + session_entity_type.DeleteSessionEntityTypeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_session_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def pre_get_session_entity_type( + self, + request: session_entity_type.GetSessionEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + session_entity_type.GetSessionEntityTypeRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_session_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_get_session_entity_type( + self, response: session_entity_type.SessionEntityType + ) -> session_entity_type.SessionEntityType: + """Post-rpc interceptor for get_session_entity_type + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. 
+ """ + return response + + def pre_list_session_entity_types( + self, + request: session_entity_type.ListSessionEntityTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + session_entity_type.ListSessionEntityTypesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_session_entity_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_list_session_entity_types( + self, response: session_entity_type.ListSessionEntityTypesResponse + ) -> session_entity_type.ListSessionEntityTypesResponse: + """Post-rpc interceptor for list_session_entity_types + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. + """ + return response + + def pre_update_session_entity_type( + self, + request: gcdc_session_entity_type.UpdateSessionEntityTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcdc_session_entity_type.UpdateSessionEntityTypeRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_session_entity_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_update_session_entity_type( + self, response: gcdc_session_entity_type.SessionEntityType + ) -> gcdc_session_entity_type.SessionEntityType: + """Post-rpc interceptor for update_session_entity_type + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. 
+ """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. 
+ """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SessionEntityTypes server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SessionEntityTypes server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class SessionEntityTypesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SessionEntityTypesRestInterceptor + + +class SessionEntityTypesRestTransport(SessionEntityTypesTransport): + """REST backend transport for SessionEntityTypes. + + Service for managing + [SessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityType]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SessionEntityTypesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SessionEntityTypesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateSessionEntityType(SessionEntityTypesRestStub): + def __hash__(self): + return hash("CreateSessionEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_session_entity_type.CreateSessionEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_session_entity_type.SessionEntityType: + r"""Call the create session entity + type method over HTTP. + + Args: + request (~.gcdc_session_entity_type.CreateSessionEntityTypeRequest): + The request object. The request message for + [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.CreateSessionEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_session_entity_type.SessionEntityType: + Session entity types are referred to as **User** entity + types and are entities that are built for an individual + user such as favorites, preferences, playlists, and so + on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at the + user session level (we refer to the entity types defined + at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + `Dialogflow + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/sessions/*}/entityTypes", + "body": "session_entity_type", + }, + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/environments/*/sessions/*}/entityTypes", + "body": "session_entity_type", + }, + ] + request, metadata = self._interceptor.pre_create_session_entity_type( + request, metadata + ) + pb_request = gcdc_session_entity_type.CreateSessionEntityTypeRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_session_entity_type.SessionEntityType() + pb_resp = gcdc_session_entity_type.SessionEntityType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_session_entity_type(resp) + return resp + + class _DeleteSessionEntityType(SessionEntityTypesRestStub): + def __hash__(self): + return hash("DeleteSessionEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: session_entity_type.DeleteSessionEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete session entity + type method over HTTP. + + Args: + request (~.session_entity_type.DeleteSessionEntityTypeRequest): + The request object. The request message for + [SessionEntityTypes.DeleteSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.DeleteSessionEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/sessions/*/entityTypes/*}", + }, + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/sessions/*/entityTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_session_entity_type( + request, metadata + ) + pb_request = session_entity_type.DeleteSessionEntityTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetSessionEntityType(SessionEntityTypesRestStub): + def __hash__(self): + return hash("GetSessionEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: session_entity_type.GetSessionEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session_entity_type.SessionEntityType: + r"""Call the get session entity type method over HTTP. + + Args: + request (~.session_entity_type.GetSessionEntityTypeRequest): + The request object. The request message for + [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.GetSessionEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.session_entity_type.SessionEntityType: + Session entity types are referred to as **User** entity + types and are entities that are built for an individual + user such as favorites, preferences, playlists, and so + on. + + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at the + user session level (we refer to the entity types defined + at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + `Dialogflow + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/sessions/*/entityTypes/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/environments/*/sessions/*/entityTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_session_entity_type( + request, metadata + ) + pb_request = session_entity_type.GetSessionEntityTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = session_entity_type.SessionEntityType() + pb_resp = session_entity_type.SessionEntityType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_session_entity_type(resp) + return resp + + class _ListSessionEntityTypes(SessionEntityTypesRestStub): + def __hash__(self): + return hash("ListSessionEntityTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: session_entity_type.ListSessionEntityTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session_entity_type.ListSessionEntityTypesResponse: + r"""Call the list session entity types method over HTTP. + + Args: + request (~.session_entity_type.ListSessionEntityTypesRequest): + The request object. The request message for + [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityTypes.ListSessionEntityTypes]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.session_entity_type.ListSessionEntityTypesResponse: + The response message for + [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.cx.v3.SessionEntityTypes.ListSessionEntityTypes]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/sessions/*}/entityTypes", + }, + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/environments/*/sessions/*}/entityTypes", + }, + ] + request, metadata = self._interceptor.pre_list_session_entity_types( + request, metadata + ) + pb_request = session_entity_type.ListSessionEntityTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = session_entity_type.ListSessionEntityTypesResponse() + pb_resp = session_entity_type.ListSessionEntityTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_session_entity_types(resp) + return resp + + class _UpdateSessionEntityType(SessionEntityTypesRestStub): + def __hash__(self): + return hash("UpdateSessionEntityType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_session_entity_type.UpdateSessionEntityTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_session_entity_type.SessionEntityType: + r"""Call the update session entity + type method over HTTP. + + Args: + request (~.gcdc_session_entity_type.UpdateSessionEntityTypeRequest): + The request object. The request message for + [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityTypes.UpdateSessionEntityType]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_session_entity_type.SessionEntityType: + Session entity types are referred to as **User** entity + types and are entities that are built for an individual + user such as favorites, preferences, playlists, and so + on. 
+ + You can redefine a session entity type at the session + level to extend or replace a [custom entity + type][google.cloud.dialogflow.cx.v3.EntityType] at the + user session level (we refer to the entity types defined + at the agent level as "custom entity types"). + + Note: session entity types apply to all queries, + regardless of the language. + + For more information about entity types, see the + `Dialogflow + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{session_entity_type.name=projects/*/locations/*/agents/*/sessions/*/entityTypes/*}", + "body": "session_entity_type", + }, + { + "method": "patch", + "uri": "/v3/{session_entity_type.name=projects/*/locations/*/agents/*/environments/*/sessions/*/entityTypes/*}", + "body": "session_entity_type", + }, + ] + request, metadata = self._interceptor.pre_update_session_entity_type( + request, metadata + ) + pb_request = gcdc_session_entity_type.UpdateSessionEntityTypeRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case 
of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_session_entity_type.SessionEntityType() + pb_resp = gcdc_session_entity_type.SessionEntityType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_session_entity_type(resp) + return resp + + @property + def create_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.CreateSessionEntityTypeRequest], + gcdc_session_entity_type.SessionEntityType, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSessionEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.DeleteSessionEntityTypeRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSessionEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_session_entity_type( + self, + ) -> Callable[ + [session_entity_type.GetSessionEntityTypeRequest], + session_entity_type.SessionEntityType, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSessionEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_session_entity_types( + self, + ) -> Callable[ + [session_entity_type.ListSessionEntityTypesRequest], + session_entity_type.ListSessionEntityTypesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSessionEntityTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_session_entity_type( + self, + ) -> Callable[ + [gcdc_session_entity_type.UpdateSessionEntityTypeRequest], + gcdc_session_entity_type.SessionEntityType, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSessionEntityType(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(SessionEntityTypesRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(SessionEntityTypesRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SessionEntityTypesRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SessionEntityTypesRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SessionEntityTypesRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SessionEntityTypesRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/__init__.py new file mode 100644 index 000000000000..d699d6d9df8c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SessionsAsyncClient +from .client import SessionsClient + +__all__ = ( + "SessionsClient", + "SessionsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py new file mode 100644 index 000000000000..a145a399b79d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/async_client.py @@ -0,0 +1,954 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import audio_config, page, session + +from .client import SessionsClient +from .transports.base import DEFAULT_CLIENT_INFO, SessionsTransport +from .transports.grpc_asyncio import SessionsGrpcAsyncIOTransport + + +class SessionsAsyncClient: + """A session represents an interaction with a user. You retrieve user + input and pass it to the + [DetectIntent][google.cloud.dialogflow.cx.v3.Sessions.DetectIntent] + method to determine user intent and respond. 
+ """ + + _client: SessionsClient + + DEFAULT_ENDPOINT = SessionsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SessionsClient.DEFAULT_MTLS_ENDPOINT + + entity_type_path = staticmethod(SessionsClient.entity_type_path) + parse_entity_type_path = staticmethod(SessionsClient.parse_entity_type_path) + flow_path = staticmethod(SessionsClient.flow_path) + parse_flow_path = staticmethod(SessionsClient.parse_flow_path) + intent_path = staticmethod(SessionsClient.intent_path) + parse_intent_path = staticmethod(SessionsClient.parse_intent_path) + page_path = staticmethod(SessionsClient.page_path) + parse_page_path = staticmethod(SessionsClient.parse_page_path) + session_path = staticmethod(SessionsClient.session_path) + parse_session_path = staticmethod(SessionsClient.parse_session_path) + session_entity_type_path = staticmethod(SessionsClient.session_entity_type_path) + parse_session_entity_type_path = staticmethod( + SessionsClient.parse_session_entity_type_path + ) + transition_route_group_path = staticmethod( + SessionsClient.transition_route_group_path + ) + parse_transition_route_group_path = staticmethod( + SessionsClient.parse_transition_route_group_path + ) + version_path = staticmethod(SessionsClient.version_path) + parse_version_path = staticmethod(SessionsClient.parse_version_path) + webhook_path = staticmethod(SessionsClient.webhook_path) + parse_webhook_path = staticmethod(SessionsClient.parse_webhook_path) + common_billing_account_path = staticmethod( + SessionsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SessionsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SessionsClient.common_folder_path) + parse_common_folder_path = staticmethod(SessionsClient.parse_common_folder_path) + common_organization_path = staticmethod(SessionsClient.common_organization_path) + parse_common_organization_path = staticmethod( + SessionsClient.parse_common_organization_path + ) + common_project_path 
= staticmethod(SessionsClient.common_project_path) + parse_common_project_path = staticmethod(SessionsClient.parse_common_project_path) + common_location_path = staticmethod(SessionsClient.common_location_path) + parse_common_location_path = staticmethod(SessionsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionsAsyncClient: The constructed client. + """ + return SessionsClient.from_service_account_info.__func__(SessionsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionsAsyncClient: The constructed client. + """ + return SessionsClient.from_service_account_file.__func__(SessionsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SessionsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SessionsTransport: + """Returns the transport used by the client instance. + + Returns: + SessionsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(SessionsClient).get_transport_class, type(SessionsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SessionsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sessions client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.SessionsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SessionsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def detect_intent( + self, + request: Optional[Union[session.DetectIntentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.DetectIntentResponse: + r"""Processes a natural language query and returns structured, + actionable data as a result. This method is not idempotent, + because it may cause session entity types to be updated, which + in turn might affect results of future queries. + + Note: Always use agent versions for production traffic. 
See + `Versions and + environments `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_detect_intent(): + # Create a client + client = dialogflowcx_v3.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflowcx_v3.QueryInput() + query_input.text.text = "text_value" + query_input.language_code = "language_code_value" + + request = dialogflowcx_v3.DetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = await client.detect_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DetectIntentRequest, dict]]): + The request object. The request to detect user's intent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.DetectIntentResponse: + The message returned from the + DetectIntent method. + + """ + # Create or coerce a protobuf request object. + request = session.DetectIntentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.detect_intent, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=220.0, + ), + default_timeout=220.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("session", request.session),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_detect_intent( + self, + requests: Optional[AsyncIterator[session.StreamingDetectIntentRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[session.StreamingDetectIntentResponse]]: + r"""Processes a natural language query in audio format in a + streaming fashion and returns structured, actionable data as a + result. This method is only available via the gRPC API (not + REST). + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_streaming_detect_intent(): + # Create a client + client = dialogflowcx_v3.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflowcx_v3.QueryInput() + query_input.text.text = "text_value" + query_input.language_code = "language_code_value" + + request = dialogflowcx_v3.StreamingDetectIntentRequest( + query_input=query_input, + ) + + # This method expects an iterator which contains + # 'dialogflowcx_v3.StreamingDetectIntentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_detect_intent(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.dialogflowcx_v3.types.StreamingDetectIntentRequest`]): + The request object AsyncIterator. The top-level message sent by the client to the + [Sessions.StreamingDetectIntent][google.cloud.dialogflow.cx.v3.Sessions.StreamingDetectIntent] + method. + + Multiple request messages should be sent in order: + + 1. The first message must contain + [session][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.session], + [query_input][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.query_input] + plus optionally + [query_params][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.query_params]. + If the client wants to receive an audio response, it + should also contain + [output_audio_config][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.output_audio_config]. + + 2. 
If + [query_input][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.query_input] + was set to + [query_input.audio.config][google.cloud.dialogflow.cx.v3.AudioInput.config], + all subsequent messages must contain + [query_input.audio.audio][google.cloud.dialogflow.cx.v3.AudioInput.audio] + to continue with Speech recognition. If you decide to + rather detect an intent from text input after you + already started Speech recognition, please send a + message with + [query_input.text][google.cloud.dialogflow.cx.v3.QueryInput.text]. + + However, note that: + + - Dialogflow will bill you for the audio duration so + far. + - Dialogflow discards all Speech recognition results + in favor of the input text. + - Dialogflow will use the language code from the + first message. + + After you sent all input, you must half-close or abort + the request stream. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.dialogflowcx_v3.types.StreamingDetectIntentResponse]: + The top-level message returned from the + [StreamingDetectIntent][google.cloud.dialogflow.cx.v3.Sessions.StreamingDetectIntent] + method. + + Multiple response messages (N) can be returned in + order. + + The first (N-1) responses set either the + recognition_result or detect_intent_response field, + depending on the request: + + - If the + StreamingDetectIntentRequest.query_input.audio + field was set, and the + StreamingDetectIntentRequest.enable_partial_response + field was false, the recognition_result field is + populated for each of the (N-1) responses. See the + [StreamingRecognitionResult][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult] + message for details about the result message + sequence. 
+ - If the + StreamingDetectIntentRequest.enable_partial_response + field was true, the detect_intent_response field + is populated for each of the (N-1) responses, + where 1 <= N <= 4. These responses set the + [DetectIntentResponse.response_type][google.cloud.dialogflow.cx.v3.DetectIntentResponse.response_type] + field to PARTIAL. + + For the final Nth response message, the + detect_intent_response is fully populated, and + [DetectIntentResponse.response_type][google.cloud.dialogflow.cx.v3.DetectIntentResponse.response_type] + is set to FINAL. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.streaming_detect_intent, + default_timeout=220.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def match_intent( + self, + request: Optional[Union[session.MatchIntentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.MatchIntentResponse: + r"""Returns preliminary intent match results, doesn't + change the session status. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_match_intent(): + # Create a client + client = dialogflowcx_v3.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflowcx_v3.QueryInput() + query_input.text.text = "text_value" + query_input.language_code = "language_code_value" + + request = dialogflowcx_v3.MatchIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = await client.match_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.MatchIntentRequest, dict]]): + The request object. Request of [MatchIntent][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.MatchIntentResponse: + Response of [MatchIntent][]. + """ + # Create or coerce a protobuf request object. + request = session.MatchIntentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.match_intent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("session", request.session),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def fulfill_intent( + self, + request: Optional[Union[session.FulfillIntentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.FulfillIntentResponse: + r"""Fulfills a matched intent returned by + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent]. + Must be called after + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent], + with input from + [MatchIntentResponse][google.cloud.dialogflow.cx.v3.MatchIntentResponse]. + Otherwise, the behavior is undefined. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_fulfill_intent(): + # Create a client + client = dialogflowcx_v3.SessionsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.FulfillIntentRequest( + ) + + # Make the request + response = await client.fulfill_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.FulfillIntentRequest, dict]]): + The request object. Request of [FulfillIntent][] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.FulfillIntentResponse: + Response of [FulfillIntent][] + """ + # Create or coerce a protobuf request object. + request = session.FulfillIntentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fulfill_intent, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "match_intent_request.session", + request.match_intent_request.session, + ), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
        # NOTE(review): wrapped with the sync gapic_v1.method helper (not
        # method_async) — this matches the generated operations mixin as
        # emitted by the GAPIC generator; confirm against generator output
        # before changing.
        rpc = gapic_v1.method.wrap_method(
            self._client._transport.get_operation,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def cancel_operation(
        self,
        request: Optional[operations_pb2.CancelOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> None:
        r"""Starts asynchronous cancellation on a long-running operation.

        The server makes a best effort to cancel the operation, but success
        is not guaranteed. If the server doesn't support this method, it returns
        `google.rpc.Code.UNIMPLEMENTED`.

        Args:
            request (:class:`~.operations_pb2.CancelOperationRequest`):
                The request object. Request message for
                `CancelOperation` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                    if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            None
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.CancelOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "SessionsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SessionsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py new file mode 100644 index 000000000000..c7a695931c2c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py @@ -0,0 +1,1365 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import audio_config, page, session + +from .transports.base import DEFAULT_CLIENT_INFO, SessionsTransport +from .transports.grpc import SessionsGrpcTransport +from .transports.grpc_asyncio import SessionsGrpcAsyncIOTransport +from .transports.rest import SessionsRestTransport + + +class SessionsClientMeta(type): + """Metaclass for the Sessions client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SessionsTransport]] + _transport_registry["grpc"] = SessionsGrpcTransport + _transport_registry["grpc_asyncio"] = SessionsGrpcAsyncIOTransport + _transport_registry["rest"] = SessionsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SessionsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SessionsClient(metaclass=SessionsClientMeta): + """A session represents an interaction with a user. You retrieve user + input and pass it to the + [DetectIntent][google.cloud.dialogflow.cx.v3.Sessions.DetectIntent] + method to determine user intent and respond. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SessionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SessionsTransport: + """Returns the transport used by the client instance. + + Returns: + SessionsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def entity_type_path( + project: str, + location: str, + agent: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + entity_type=entity_type, + ) + + @staticmethod + def parse_entity_type_path(path: str) -> Dict[str, str]: + """Parses a entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def flow_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_path(path: str) -> Dict[str, str]: + """Parses a flow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def intent_path( + project: str, + location: str, + agent: str, + intent: str, + ) -> str: + """Returns a fully-qualified intent string.""" + return "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format( + project=project, + location=location, + agent=agent, + intent=intent, + ) + + @staticmethod + def parse_intent_path(path: str) -> Dict[str, str]: + """Parses a intent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/intents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def page_path( + project: str, + location: str, + agent: str, + flow: str, + page: str, + ) -> str: + """Returns a fully-qualified page 
string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/pages/{page}".format( + project=project, + location=location, + agent=agent, + flow=flow, + page=page, + ) + + @staticmethod + def parse_page_path(path: str) -> Dict[str, str]: + """Parses a page path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/pages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def session_path( + project: str, + location: str, + agent: str, + session: str, + ) -> str: + """Returns a fully-qualified session string.""" + return "projects/{project}/locations/{location}/agents/{agent}/sessions/{session}".format( + project=project, + location=location, + agent=agent, + session=session, + ) + + @staticmethod + def parse_session_path(path: str) -> Dict[str, str]: + """Parses a session path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/sessions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def session_entity_type_path( + project: str, + location: str, + agent: str, + session: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified session_entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/sessions/{session}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + session=session, + entity_type=entity_type, + ) + + @staticmethod + def parse_session_entity_type_path(path: str) -> Dict[str, str]: + """Parses a session_entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/sessions/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transition_route_group_path( + project: str, + location: str, + agent: str, + flow: str, + transition_route_group: str, + ) -> str: + """Returns a 
fully-qualified transition_route_group string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( + project=project, + location=location, + agent=agent, + flow=flow, + transition_route_group=transition_route_group, + ) + + @staticmethod + def parse_transition_route_group_path(path: str) -> Dict[str, str]: + """Parses a transition_route_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/transitionRouteGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def version_path( + project: str, + location: str, + agent: str, + flow: str, + version: str, + ) -> str: + """Returns a fully-qualified version string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/versions/{version}".format( + project=project, + location=location, + agent=agent, + flow=flow, + version=version, + ) + + @staticmethod + def parse_version_path(path: str) -> Dict[str, str]: + """Parses a version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/versions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a 
fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component 
segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SessionsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the sessions client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SessionsTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SessionsTransport): + # transport is a SessionsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def detect_intent( + self, + request: Optional[Union[session.DetectIntentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.DetectIntentResponse: + r"""Processes a natural language query and returns structured, + actionable data as a result. This method is not idempotent, + because it may cause session entity types to be updated, which + in turn might affect results of future queries. + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_detect_intent(): + # Create a client + client = dialogflowcx_v3.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflowcx_v3.QueryInput() + query_input.text.text = "text_value" + query_input.language_code = "language_code_value" + + request = dialogflowcx_v3.DetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = client.detect_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DetectIntentRequest, dict]): + The request object. The request to detect user's intent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.DetectIntentResponse: + The message returned from the + DetectIntent method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a session.DetectIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, session.DetectIntentRequest): + request = session.DetectIntentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detect_intent] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("session", request.session),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_detect_intent( + self, + requests: Optional[Iterator[session.StreamingDetectIntentRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[session.StreamingDetectIntentResponse]: + r"""Processes a natural language query in audio format in a + streaming fashion and returns structured, actionable data as a + result. This method is only available via the gRPC API (not + REST). + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_streaming_detect_intent(): + # Create a client + client = dialogflowcx_v3.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflowcx_v3.QueryInput() + query_input.text.text = "text_value" + query_input.language_code = "language_code_value" + + request = dialogflowcx_v3.StreamingDetectIntentRequest( + query_input=query_input, + ) + + # This method expects an iterator which contains + # 'dialogflowcx_v3.StreamingDetectIntentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_detect_intent(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.dialogflowcx_v3.types.StreamingDetectIntentRequest]): + The request object iterator. The top-level message sent by the client to the + [Sessions.StreamingDetectIntent][google.cloud.dialogflow.cx.v3.Sessions.StreamingDetectIntent] + method. + + Multiple request messages should be sent in order: + + 1. The first message must contain + [session][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.session], + [query_input][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.query_input] + plus optionally + [query_params][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.query_params]. + If the client wants to receive an audio response, it + should also contain + [output_audio_config][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.output_audio_config]. + + 2. If + [query_input][google.cloud.dialogflow.cx.v3.StreamingDetectIntentRequest.query_input] + was set to + [query_input.audio.config][google.cloud.dialogflow.cx.v3.AudioInput.config], + all subsequent messages must contain + [query_input.audio.audio][google.cloud.dialogflow.cx.v3.AudioInput.audio] + to continue with Speech recognition. If you decide to + rather detect an intent from text input after you + already started Speech recognition, please send a + message with + [query_input.text][google.cloud.dialogflow.cx.v3.QueryInput.text]. + + However, note that: + + - Dialogflow will bill you for the audio duration so + far. + - Dialogflow discards all Speech recognition results + in favor of the input text. + - Dialogflow will use the language code from the + first message. + + After you sent all input, you must half-close or abort + the request stream. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.dialogflowcx_v3.types.StreamingDetectIntentResponse]: + The top-level message returned from the + [StreamingDetectIntent][google.cloud.dialogflow.cx.v3.Sessions.StreamingDetectIntent] + method. + + Multiple response messages (N) can be returned in + order. + + The first (N-1) responses set either the + recognition_result or detect_intent_response field, + depending on the request: + + - If the + StreamingDetectIntentRequest.query_input.audio + field was set, and the + StreamingDetectIntentRequest.enable_partial_response + field was false, the recognition_result field is + populated for each of the (N-1) responses. See the + [StreamingRecognitionResult][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult] + message for details about the result message + sequence. + - If the + StreamingDetectIntentRequest.enable_partial_response + field was true, the detect_intent_response field + is populated for each of the (N-1) responses, + where 1 <= N <= 4. These responses set the + [DetectIntentResponse.response_type][google.cloud.dialogflow.cx.v3.DetectIntentResponse.response_type] + field to PARTIAL. + + For the final Nth response message, the + detect_intent_response is fully populated, and + [DetectIntentResponse.response_type][google.cloud.dialogflow.cx.v3.DetectIntentResponse.response_type] + is set to FINAL. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_detect_intent] + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def match_intent( + self, + request: Optional[Union[session.MatchIntentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.MatchIntentResponse: + r"""Returns preliminary intent match results, doesn't + change the session status. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_match_intent(): + # Create a client + client = dialogflowcx_v3.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflowcx_v3.QueryInput() + query_input.text.text = "text_value" + query_input.language_code = "language_code_value" + + request = dialogflowcx_v3.MatchIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = client.match_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.MatchIntentRequest, dict]): + The request object. Request of [MatchIntent][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.MatchIntentResponse: + Response of [MatchIntent][]. + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a session.MatchIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, session.MatchIntentRequest): + request = session.MatchIntentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.match_intent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("session", request.session),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fulfill_intent( + self, + request: Optional[Union[session.FulfillIntentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.FulfillIntentResponse: + r"""Fulfills a matched intent returned by + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent]. + Must be called after + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent], + with input from + [MatchIntentResponse][google.cloud.dialogflow.cx.v3.MatchIntentResponse]. + Otherwise, the behavior is undefined. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_fulfill_intent(): + # Create a client + client = dialogflowcx_v3.SessionsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.FulfillIntentRequest( + ) + + # Make the request + response = client.fulfill_intent(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.FulfillIntentRequest, dict]): + The request object. Request of [FulfillIntent][] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.FulfillIntentResponse: + Response of [FulfillIntent][] + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a session.FulfillIntentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, session.FulfillIntentRequest): + request = session.FulfillIntentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fulfill_intent] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "match_intent_request.session", + request.match_intent_request.session, + ), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "SessionsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SessionsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/__init__.py new file mode 100644 index 000000000000..f9053516b61d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SessionsTransport +from .grpc import SessionsGrpcTransport +from .grpc_asyncio import SessionsGrpcAsyncIOTransport +from .rest import SessionsRestInterceptor, SessionsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[SessionsTransport]] +_transport_registry["grpc"] = SessionsGrpcTransport +_transport_registry["grpc_asyncio"] = SessionsGrpcAsyncIOTransport +_transport_registry["rest"] = SessionsRestTransport + +__all__ = ( + "SessionsTransport", + "SessionsGrpcTransport", + "SessionsGrpcAsyncIOTransport", + "SessionsRestTransport", + "SessionsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/base.py new file mode 100644 index 000000000000..aa456b765760 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/base.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import session + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class SessionsTransport(abc.ABC): + """Abstract transport class for Sessions.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.detect_intent: gapic_v1.method.wrap_method( + self.detect_intent, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=220.0, + ), + default_timeout=220.0, + client_info=client_info, + ), + self.streaming_detect_intent: gapic_v1.method.wrap_method( + self.streaming_detect_intent, + default_timeout=220.0, + client_info=client_info, + ), + self.match_intent: gapic_v1.method.wrap_method( + self.match_intent, + default_timeout=None, + client_info=client_info, + ), + self.fulfill_intent: gapic_v1.method.wrap_method( + self.fulfill_intent, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def detect_intent( + self, + ) -> Callable[ + [session.DetectIntentRequest], + Union[session.DetectIntentResponse, Awaitable[session.DetectIntentResponse]], + ]: + raise NotImplementedError() + + @property + def streaming_detect_intent( + self, + ) -> Callable[ + [session.StreamingDetectIntentRequest], + Union[ + session.StreamingDetectIntentResponse, + Awaitable[session.StreamingDetectIntentResponse], + ], + ]: + raise NotImplementedError() + + @property + def match_intent( + self, + ) -> Callable[ + [session.MatchIntentRequest], + Union[session.MatchIntentResponse, Awaitable[session.MatchIntentResponse]], + ]: + raise NotImplementedError() + + @property + def fulfill_intent( + self, + ) -> Callable[ + [session.FulfillIntentRequest], + Union[session.FulfillIntentResponse, Awaitable[session.FulfillIntentResponse]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + +__all__ = ("SessionsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/grpc.py new file mode 100644 index 000000000000..7a0ef556ddf7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/grpc.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import session + +from .base import DEFAULT_CLIENT_INFO, SessionsTransport + + +class SessionsGrpcTransport(SessionsTransport): + """gRPC backend transport for Sessions. + + A session represents an interaction with a user. You retrieve user + input and pass it to the + [DetectIntent][google.cloud.dialogflow.cx.v3.Sessions.DetectIntent] + method to determine user intent and respond. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def detect_intent( + self, + ) -> Callable[[session.DetectIntentRequest], session.DetectIntentResponse]: + r"""Return a callable for the detect intent method over gRPC. + + Processes a natural language query and returns structured, + actionable data as a result. This method is not idempotent, + because it may cause session entity types to be updated, which + in turn might affect results of future queries. + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + Returns: + Callable[[~.DetectIntentRequest], + ~.DetectIntentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "detect_intent" not in self._stubs: + self._stubs["detect_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Sessions/DetectIntent", + request_serializer=session.DetectIntentRequest.serialize, + response_deserializer=session.DetectIntentResponse.deserialize, + ) + return self._stubs["detect_intent"] + + @property + def streaming_detect_intent( + self, + ) -> Callable[ + [session.StreamingDetectIntentRequest], session.StreamingDetectIntentResponse + ]: + r"""Return a callable for the streaming detect intent method over gRPC. + + Processes a natural language query in audio format in a + streaming fashion and returns structured, actionable data as a + result. 
This method is only available via the gRPC API (not + REST). + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + Returns: + Callable[[~.StreamingDetectIntentRequest], + ~.StreamingDetectIntentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_detect_intent" not in self._stubs: + self._stubs["streaming_detect_intent"] = self.grpc_channel.stream_stream( + "/google.cloud.dialogflow.cx.v3.Sessions/StreamingDetectIntent", + request_serializer=session.StreamingDetectIntentRequest.serialize, + response_deserializer=session.StreamingDetectIntentResponse.deserialize, + ) + return self._stubs["streaming_detect_intent"] + + @property + def match_intent( + self, + ) -> Callable[[session.MatchIntentRequest], session.MatchIntentResponse]: + r"""Return a callable for the match intent method over gRPC. + + Returns preliminary intent match results, doesn't + change the session status. + + Returns: + Callable[[~.MatchIntentRequest], + ~.MatchIntentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "match_intent" not in self._stubs: + self._stubs["match_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Sessions/MatchIntent", + request_serializer=session.MatchIntentRequest.serialize, + response_deserializer=session.MatchIntentResponse.deserialize, + ) + return self._stubs["match_intent"] + + @property + def fulfill_intent( + self, + ) -> Callable[[session.FulfillIntentRequest], session.FulfillIntentResponse]: + r"""Return a callable for the fulfill intent method over gRPC. + + Fulfills a matched intent returned by + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent]. + Must be called after + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent], + with input from + [MatchIntentResponse][google.cloud.dialogflow.cx.v3.MatchIntentResponse]. + Otherwise, the behavior is undefined. + + Returns: + Callable[[~.FulfillIntentRequest], + ~.FulfillIntentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fulfill_intent" not in self._stubs: + self._stubs["fulfill_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Sessions/FulfillIntent", + request_serializer=session.FulfillIntentRequest.serialize, + response_deserializer=session.FulfillIntentResponse.deserialize, + ) + return self._stubs["fulfill_intent"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SessionsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4f5e5e139450 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/grpc_asyncio.py @@ -0,0 +1,464 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import session + +from .base import DEFAULT_CLIENT_INFO, SessionsTransport +from .grpc import SessionsGrpcTransport + + +class SessionsGrpcAsyncIOTransport(SessionsTransport): + """gRPC AsyncIO backend transport for Sessions. + + A session represents an interaction with a user. You retrieve user + input and pass it to the + [DetectIntent][google.cloud.dialogflow.cx.v3.Sessions.DetectIntent] + method to determine user intent and respond. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def detect_intent( + self, + ) -> Callable[ + [session.DetectIntentRequest], Awaitable[session.DetectIntentResponse] + ]: + r"""Return a callable for the detect intent method over gRPC. + + Processes a natural language query and returns structured, + actionable data as a result. 
This method is not idempotent, + because it may cause session entity types to be updated, which + in turn might affect results of future queries. + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + Returns: + Callable[[~.DetectIntentRequest], + Awaitable[~.DetectIntentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "detect_intent" not in self._stubs: + self._stubs["detect_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Sessions/DetectIntent", + request_serializer=session.DetectIntentRequest.serialize, + response_deserializer=session.DetectIntentResponse.deserialize, + ) + return self._stubs["detect_intent"] + + @property + def streaming_detect_intent( + self, + ) -> Callable[ + [session.StreamingDetectIntentRequest], + Awaitable[session.StreamingDetectIntentResponse], + ]: + r"""Return a callable for the streaming detect intent method over gRPC. + + Processes a natural language query in audio format in a + streaming fashion and returns structured, actionable data as a + result. This method is only available via the gRPC API (not + REST). + + Note: Always use agent versions for production traffic. See + `Versions and + environments `__. + + Returns: + Callable[[~.StreamingDetectIntentRequest], + Awaitable[~.StreamingDetectIntentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "streaming_detect_intent" not in self._stubs: + self._stubs["streaming_detect_intent"] = self.grpc_channel.stream_stream( + "/google.cloud.dialogflow.cx.v3.Sessions/StreamingDetectIntent", + request_serializer=session.StreamingDetectIntentRequest.serialize, + response_deserializer=session.StreamingDetectIntentResponse.deserialize, + ) + return self._stubs["streaming_detect_intent"] + + @property + def match_intent( + self, + ) -> Callable[[session.MatchIntentRequest], Awaitable[session.MatchIntentResponse]]: + r"""Return a callable for the match intent method over gRPC. + + Returns preliminary intent match results, doesn't + change the session status. + + Returns: + Callable[[~.MatchIntentRequest], + Awaitable[~.MatchIntentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "match_intent" not in self._stubs: + self._stubs["match_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Sessions/MatchIntent", + request_serializer=session.MatchIntentRequest.serialize, + response_deserializer=session.MatchIntentResponse.deserialize, + ) + return self._stubs["match_intent"] + + @property + def fulfill_intent( + self, + ) -> Callable[ + [session.FulfillIntentRequest], Awaitable[session.FulfillIntentResponse] + ]: + r"""Return a callable for the fulfill intent method over gRPC. + + Fulfills a matched intent returned by + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent]. + Must be called after + [MatchIntent][google.cloud.dialogflow.cx.v3.Sessions.MatchIntent], + with input from + [MatchIntentResponse][google.cloud.dialogflow.cx.v3.MatchIntentResponse]. + Otherwise, the behavior is undefined. 
+ + Returns: + Callable[[~.FulfillIntentRequest], + Awaitable[~.FulfillIntentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fulfill_intent" not in self._stubs: + self._stubs["fulfill_intent"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Sessions/FulfillIntent", + request_serializer=session.FulfillIntentRequest.serialize, + response_deserializer=session.FulfillIntentResponse.deserialize, + ) + return self._stubs["fulfill_intent"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("SessionsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py new file mode 100644 index 000000000000..b632ef1e879f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py @@ -0,0 +1,1067 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import session + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SessionsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SessionsRestInterceptor: + """Interceptor for Sessions. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SessionsRestTransport. + + .. code-block:: python + class MyCustomSessionsInterceptor(SessionsRestInterceptor): + def pre_detect_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_detect_intent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fulfill_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fulfill_intent(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_match_intent(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_match_intent(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SessionsRestTransport(interceptor=MyCustomSessionsInterceptor()) + client = SessionsClient(transport=transport) + + + """ + + def pre_detect_intent( + self, request: session.DetectIntentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[session.DetectIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for detect_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_detect_intent( + self, response: session.DetectIntentResponse + ) -> session.DetectIntentResponse: + """Post-rpc interceptor for detect_intent + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. 
+ """ + return response + + def pre_fulfill_intent( + self, request: session.FulfillIntentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[session.FulfillIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fulfill_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_fulfill_intent( + self, response: session.FulfillIntentResponse + ) -> session.FulfillIntentResponse: + """Post-rpc interceptor for fulfill_intent + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. + """ + return response + + def pre_match_intent( + self, request: session.MatchIntentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[session.MatchIntentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for match_intent + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_match_intent( + self, response: session.MatchIntentResponse + ) -> session.MatchIntentResponse: + """Post-rpc interceptor for match_intent + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Sessions server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Sessions server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SessionsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SessionsRestInterceptor + + +class SessionsRestTransport(SessionsTransport): + """REST backend transport for Sessions. + + A session represents an interaction with a user. You retrieve user + input and pass it to the + [DetectIntent][google.cloud.dialogflow.cx.v3.Sessions.DetectIntent] + method to determine user intent and respond. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SessionsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SessionsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DetectIntent(SessionsRestStub): + def __hash__(self): + return hash("DetectIntent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: session.DetectIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.DetectIntentResponse: + r"""Call the detect intent method over HTTP. 
+ + Args: + request (~.session.DetectIntentRequest): + The request object. The request to detect user's intent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.session.DetectIntentResponse: + The message returned from the + DetectIntent method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{session=projects/*/locations/*/agents/*/sessions/*}:detectIntent", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{session=projects/*/locations/*/agents/*/environments/*/sessions/*}:detectIntent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_detect_intent(request, metadata) + pb_request = session.DetectIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = session.DetectIntentResponse() + pb_resp = session.DetectIntentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detect_intent(resp) + return resp + + class _FulfillIntent(SessionsRestStub): + def __hash__(self): + return hash("FulfillIntent") + + def __call__( + self, + request: session.FulfillIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.FulfillIntentResponse: + r"""Call the fulfill intent method over HTTP. + + Args: + request (~.session.FulfillIntentRequest): + The request object. Request of [FulfillIntent][] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.session.FulfillIntentResponse: + Response of [FulfillIntent][] + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{match_intent_request.session=projects/*/locations/*/agents/*/sessions/*}:fulfillIntent", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{match_intent_request.session=projects/*/locations/*/agents/*/environments/*/sessions/*}:fulfillIntent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_fulfill_intent(request, metadata) + pb_request = session.FulfillIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = session.FulfillIntentResponse() + pb_resp = session.FulfillIntentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fulfill_intent(resp) + return resp + + class _MatchIntent(SessionsRestStub): + def __hash__(self): + return hash("MatchIntent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: session.MatchIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> session.MatchIntentResponse: + r"""Call the match intent method over HTTP. + + Args: + request (~.session.MatchIntentRequest): + The request object. Request of [MatchIntent][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.session.MatchIntentResponse: + Response of [MatchIntent][]. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{session=projects/*/locations/*/agents/*/sessions/*}:matchIntent", + "body": "*", + }, + { + "method": "post", + "uri": "/v3/{session=projects/*/locations/*/agents/*/environments/*/sessions/*}:matchIntent", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_match_intent(request, metadata) + pb_request = session.MatchIntentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = session.MatchIntentResponse() + pb_resp = session.MatchIntentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_match_intent(resp) + return resp + + class _StreamingDetectIntent(SessionsRestStub): + def __hash__(self): + return hash("StreamingDetectIntent") + + def __call__( + self, + request: session.StreamingDetectIntentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamingDetectIntent is not available over REST transport" + ) + + @property + def detect_intent( + self, + ) -> Callable[[session.DetectIntentRequest], session.DetectIntentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetectIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def fulfill_intent( + self, + ) -> Callable[[session.FulfillIntentRequest], session.FulfillIntentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FulfillIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def match_intent( + self, + ) -> Callable[[session.MatchIntentRequest], session.MatchIntentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._MatchIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def streaming_detect_intent( + self, + ) -> Callable[ + [session.StreamingDetectIntentRequest], session.StreamingDetectIntentResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingDetectIntent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(SessionsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(SessionsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SessionsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SessionsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SessionsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SessionsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/__init__.py new file mode 100644 index 000000000000..48b882a3671f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import TestCasesAsyncClient +from .client import TestCasesClient + +__all__ = ( + "TestCasesClient", + "TestCasesAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py new file mode 100644 index 000000000000..45f9a053aeee --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/async_client.py @@ -0,0 +1,1789 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.test_cases import pagers +from google.cloud.dialogflowcx_v3.types import test_case +from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case + +from .client import TestCasesClient +from .transports.base import DEFAULT_CLIENT_INFO, TestCasesTransport +from .transports.grpc_asyncio import TestCasesGrpcAsyncIOTransport + + +class TestCasesAsyncClient: + """Service for managing [Test + Cases][google.cloud.dialogflow.cx.v3.TestCase] and [Test Case + Results][google.cloud.dialogflow.cx.v3.TestCaseResult]. 
+ """ + + _client: TestCasesClient + + DEFAULT_ENDPOINT = TestCasesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TestCasesClient.DEFAULT_MTLS_ENDPOINT + + agent_path = staticmethod(TestCasesClient.agent_path) + parse_agent_path = staticmethod(TestCasesClient.parse_agent_path) + entity_type_path = staticmethod(TestCasesClient.entity_type_path) + parse_entity_type_path = staticmethod(TestCasesClient.parse_entity_type_path) + environment_path = staticmethod(TestCasesClient.environment_path) + parse_environment_path = staticmethod(TestCasesClient.parse_environment_path) + flow_path = staticmethod(TestCasesClient.flow_path) + parse_flow_path = staticmethod(TestCasesClient.parse_flow_path) + intent_path = staticmethod(TestCasesClient.intent_path) + parse_intent_path = staticmethod(TestCasesClient.parse_intent_path) + page_path = staticmethod(TestCasesClient.page_path) + parse_page_path = staticmethod(TestCasesClient.parse_page_path) + test_case_path = staticmethod(TestCasesClient.test_case_path) + parse_test_case_path = staticmethod(TestCasesClient.parse_test_case_path) + test_case_result_path = staticmethod(TestCasesClient.test_case_result_path) + parse_test_case_result_path = staticmethod( + TestCasesClient.parse_test_case_result_path + ) + transition_route_group_path = staticmethod( + TestCasesClient.transition_route_group_path + ) + parse_transition_route_group_path = staticmethod( + TestCasesClient.parse_transition_route_group_path + ) + webhook_path = staticmethod(TestCasesClient.webhook_path) + parse_webhook_path = staticmethod(TestCasesClient.parse_webhook_path) + common_billing_account_path = staticmethod( + TestCasesClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TestCasesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TestCasesClient.common_folder_path) + parse_common_folder_path = staticmethod(TestCasesClient.parse_common_folder_path) + common_organization_path = 
staticmethod(TestCasesClient.common_organization_path) + parse_common_organization_path = staticmethod( + TestCasesClient.parse_common_organization_path + ) + common_project_path = staticmethod(TestCasesClient.common_project_path) + parse_common_project_path = staticmethod(TestCasesClient.parse_common_project_path) + common_location_path = staticmethod(TestCasesClient.common_location_path) + parse_common_location_path = staticmethod( + TestCasesClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TestCasesAsyncClient: The constructed client. + """ + return TestCasesClient.from_service_account_info.__func__(TestCasesAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TestCasesAsyncClient: The constructed client. + """ + return TestCasesClient.from_service_account_file.__func__(TestCasesAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TestCasesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TestCasesTransport: + """Returns the transport used by the client instance. + + Returns: + TestCasesTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TestCasesClient).get_transport_class, type(TestCasesClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TestCasesTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the test cases client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TestCasesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TestCasesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_test_cases( + self, + request: Optional[Union[test_case.ListTestCasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTestCasesAsyncPager: + r"""Fetches a list of test cases for a given agent. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListTestCasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_test_cases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListTestCasesRequest, dict]]): + The request object. The request message for + [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. + parent (:class:`str`): + Required. The agent to list all pages for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.test_cases.pagers.ListTestCasesAsyncPager: + The response message for + [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = test_case.ListTestCasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_test_cases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTestCasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_delete_test_cases( + self, + request: Optional[Union[test_case.BatchDeleteTestCasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Batch deletes test cases. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_batch_delete_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.BatchDeleteTestCasesRequest( + parent="parent_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + await client.batch_delete_test_cases(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.BatchDeleteTestCasesRequest, dict]]): + The request object. The request message for + [TestCases.BatchDeleteTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchDeleteTestCases]. + parent (:class:`str`): + Required. The agent to delete test cases from. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = test_case.BatchDeleteTestCasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_delete_test_cases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_test_case( + self, + request: Optional[Union[test_case.GetTestCaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.TestCase: + r"""Gets a test case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetTestCaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_test_case(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetTestCaseRequest, dict]]): + The request object. The request message for + [TestCases.GetTestCase][google.cloud.dialogflow.cx.v3.TestCases.GetTestCase]. 
+ name (:class:`str`): + Required. The name of the testcase. Format: + ``projects//locations//agents//testCases/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCase: + Represents a test case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = test_case.GetTestCaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_test_case, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_test_case( + self, + request: Optional[Union[gcdc_test_case.CreateTestCaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + test_case: Optional[gcdc_test_case.TestCase] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_test_case.TestCase: + r"""Creates a test case for the given agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + test_case = dialogflowcx_v3.TestCase() + test_case.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateTestCaseRequest( + parent="parent_value", + test_case=test_case, + ) + + # Make the request + response = await client.create_test_case(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateTestCaseRequest, dict]]): + The request object. The request message for + [TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase]. + parent (:class:`str`): + Required. The agent to create the test case for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_case (:class:`google.cloud.dialogflowcx_v3.types.TestCase`): + Required. The test case to create. 
+ This corresponds to the ``test_case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCase: + Represents a test case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, test_case]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_test_case.CreateTestCaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if test_case is not None: + request.test_case = test_case + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_test_case, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_test_case( + self, + request: Optional[Union[gcdc_test_case.UpdateTestCaseRequest, dict]] = None, + *, + test_case: Optional[gcdc_test_case.TestCase] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_test_case.TestCase: + r"""Updates the specified test case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + test_case = dialogflowcx_v3.TestCase() + test_case.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateTestCaseRequest( + test_case=test_case, + ) + + # Make the request + response = await client.update_test_case(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateTestCaseRequest, dict]]): + The request object. The request message for + [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3.TestCases.UpdateTestCase]. + test_case (:class:`google.cloud.dialogflowcx_v3.types.TestCase`): + Required. The test case to update. + This corresponds to the ``test_case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The mask to specify which fields should be + updated. 
The + [``creationTime``][google.cloud.dialogflow.cx.v3.TestCase.creation_time] + and + [``lastTestResult``][google.cloud.dialogflow.cx.v3.TestCase.last_test_result] + cannot be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCase: + Represents a test case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([test_case, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_test_case.UpdateTestCaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if test_case is not None: + request.test_case = test_case + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_test_case, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("test_case.name", request.test_case.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def run_test_case( + self, + request: Optional[Union[test_case.RunTestCaseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Kicks off a test case run. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunTestCaseMetadata][google.cloud.dialogflow.cx.v3.RunTestCaseMetadata] + - ``response``: + [RunTestCaseResponse][google.cloud.dialogflow.cx.v3.RunTestCaseResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_run_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.RunTestCaseRequest( + name="name_value", + ) + + # Make the request + operation = client.run_test_case(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.RunTestCaseRequest, dict]]): + The request object. The request message for + [TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.RunTestCaseResponse` The response message for + [TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase]. + + """ + # Create or coerce a protobuf request object. + request = test_case.RunTestCaseRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_test_case, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + test_case.RunTestCaseResponse, + metadata_type=test_case.RunTestCaseMetadata, + ) + + # Done; return the response. + return response + + async def batch_run_test_cases( + self, + request: Optional[Union[test_case.BatchRunTestCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Kicks off a batch run of test cases. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [BatchRunTestCasesMetadata][google.cloud.dialogflow.cx.v3.BatchRunTestCasesMetadata] + - ``response``: + [BatchRunTestCasesResponse][google.cloud.dialogflow.cx.v3.BatchRunTestCasesResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_batch_run_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.BatchRunTestCasesRequest( + parent="parent_value", + test_cases=['test_cases_value1', 'test_cases_value2'], + ) + + # Make the request + operation = client.batch_run_test_cases(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.BatchRunTestCasesRequest, dict]]): + The request object. The request message for + [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.BatchRunTestCasesResponse` The response message for + [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases]. + + """ + # Create or coerce a protobuf request object. + request = test_case.BatchRunTestCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_run_test_cases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + test_case.BatchRunTestCasesResponse, + metadata_type=test_case.BatchRunTestCasesMetadata, + ) + + # Done; return the response. + return response + + async def calculate_coverage( + self, + request: Optional[Union[test_case.CalculateCoverageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.CalculateCoverageResponse: + r"""Calculates the test coverage for an agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_calculate_coverage(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.CalculateCoverageRequest( + agent="agent_value", + type_="TRANSITION_ROUTE_GROUP", + ) + + # Make the request + response = await client.calculate_coverage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CalculateCoverageRequest, dict]]): + The request object. The request message for + [TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.CalculateCoverageResponse: + The response message for + [TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage]. + + """ + # Create or coerce a protobuf request object. + request = test_case.CalculateCoverageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.calculate_coverage, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("agent", request.agent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def import_test_cases( + self, + request: Optional[Union[test_case.ImportTestCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the test cases from a Cloud Storage bucket or a local + file. It always creates new test cases and won't overwrite any + existing ones. The provided ID in the imported test case is + neglected. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ImportTestCasesMetadata] + - ``response``: + [ImportTestCasesResponse][google.cloud.dialogflow.cx.v3.ImportTestCasesResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_import_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportTestCasesRequest( + gcs_uri="gcs_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_test_cases(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ImportTestCasesRequest, dict]]): + The request object. The request message for + [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ImportTestCasesResponse` The response message for + [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. + + """ + # Create or coerce a protobuf request object. + request = test_case.ImportTestCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_test_cases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + test_case.ImportTestCasesResponse, + metadata_type=test_case.ImportTestCasesMetadata, + ) + + # Done; return the response. + return response + + async def export_test_cases( + self, + request: Optional[Union[test_case.ExportTestCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports the test cases under the agent to a Cloud Storage bucket + or a local file. Filter can be applied to export a subset of + test cases. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ExportTestCasesMetadata] + - ``response``: + [ExportTestCasesResponse][google.cloud.dialogflow.cx.v3.ExportTestCasesResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_export_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportTestCasesRequest( + gcs_uri="gcs_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.export_test_cases(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ExportTestCasesRequest, dict]]): + The request object. The request message for + [TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportTestCasesResponse` The response message for + [TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases]. + + """ + # Create or coerce a protobuf request object. + request = test_case.ExportTestCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_test_cases, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + test_case.ExportTestCasesResponse, + metadata_type=test_case.ExportTestCasesMetadata, + ) + + # Done; return the response. + return response + + async def list_test_case_results( + self, + request: Optional[Union[test_case.ListTestCaseResultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTestCaseResultsAsyncPager: + r"""Fetches the list of run results for the given test + case. A maximum of 100 results are kept for each test + case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_test_case_results(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListTestCaseResultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_test_case_results(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListTestCaseResultsRequest, dict]]): + The request object. The request message for + [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. + parent (:class:`str`): + Required. The test case to list results for. Format: + ``projects//locations//agents// testCases/``. + Specify a ``-`` as a wildcard for TestCase ID to list + results across multiple test cases. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.test_cases.pagers.ListTestCaseResultsAsyncPager: + The response message for + [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = test_case.ListTestCaseResultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_test_case_results, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTestCaseResultsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_test_case_result( + self, + request: Optional[Union[test_case.GetTestCaseResultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.TestCaseResult: + r"""Gets a test case result. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_test_case_result(): + # Create a client + client = dialogflowcx_v3.TestCasesAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetTestCaseResultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_test_case_result(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetTestCaseResultRequest, dict]]): + The request object. The request message for + [TestCases.GetTestCaseResult][google.cloud.dialogflow.cx.v3.TestCases.GetTestCaseResult]. + name (:class:`str`): + Required. The name of the testcase. Format: + ``projects//locations//agents//testCases//results/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCaseResult: + Represents a result from running a + test case in an agent environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = test_case.GetTestCaseResultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_test_case_result, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "TestCasesAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TestCasesAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py new file mode 100644 index 000000000000..bcf2169dcadc --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py @@ -0,0 +1,2232 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.test_cases import pagers +from google.cloud.dialogflowcx_v3.types import test_case +from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case + +from .transports.base import DEFAULT_CLIENT_INFO, TestCasesTransport +from .transports.grpc import TestCasesGrpcTransport +from .transports.grpc_asyncio import TestCasesGrpcAsyncIOTransport +from .transports.rest import TestCasesRestTransport + + +class TestCasesClientMeta(type): + """Metaclass for the TestCases client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TestCasesTransport]] + _transport_registry["grpc"] = TestCasesGrpcTransport + _transport_registry["grpc_asyncio"] = TestCasesGrpcAsyncIOTransport + _transport_registry["rest"] = TestCasesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TestCasesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TestCasesClient(metaclass=TestCasesClientMeta): + """Service for managing [Test + Cases][google.cloud.dialogflow.cx.v3.TestCase] and [Test Case + Results][google.cloud.dialogflow.cx.v3.TestCaseResult]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TestCasesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TestCasesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TestCasesTransport: + """Returns the transport used by the client instance. + + Returns: + TestCasesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def agent_path( + project: str, + location: str, + agent: str, + ) -> str: + """Returns a fully-qualified agent string.""" + return "projects/{project}/locations/{location}/agents/{agent}".format( + project=project, + location=location, + agent=agent, + ) + + @staticmethod + def parse_agent_path(path: str) -> Dict[str, str]: + """Parses a agent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def entity_type_path( + project: str, + location: str, + agent: str, + entity_type: str, + ) -> str: + """Returns a fully-qualified entity_type string.""" + return "projects/{project}/locations/{location}/agents/{agent}/entityTypes/{entity_type}".format( + project=project, + location=location, + agent=agent, + entity_type=entity_type, + ) + + @staticmethod + def parse_entity_type_path(path: str) -> Dict[str, str]: + """Parses a entity_type path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/entityTypes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def environment_path( + project: str, + location: str, + agent: str, + environment: str, + ) -> str: + """Returns a fully-qualified environment string.""" + return "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}".format( + project=project, + location=location, + agent=agent, + environment=environment, + ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str, str]: + """Parses a environment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/environments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def flow_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow string.""" 
+ return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_path(path: str) -> Dict[str, str]: + """Parses a flow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def intent_path( + project: str, + location: str, + agent: str, + intent: str, + ) -> str: + """Returns a fully-qualified intent string.""" + return "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format( + project=project, + location=location, + agent=agent, + intent=intent, + ) + + @staticmethod + def parse_intent_path(path: str) -> Dict[str, str]: + """Parses a intent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/intents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def page_path( + project: str, + location: str, + agent: str, + flow: str, + page: str, + ) -> str: + """Returns a fully-qualified page string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/pages/{page}".format( + project=project, + location=location, + agent=agent, + flow=flow, + page=page, + ) + + @staticmethod + def parse_page_path(path: str) -> Dict[str, str]: + """Parses a page path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/pages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def test_case_path( + project: str, + location: str, + agent: str, + test_case: str, + ) -> str: + """Returns a fully-qualified test_case string.""" + return "projects/{project}/locations/{location}/agents/{agent}/testCases/{test_case}".format( + project=project, + location=location, + agent=agent, + test_case=test_case, + ) + + 
@staticmethod + def parse_test_case_path(path: str) -> Dict[str, str]: + """Parses a test_case path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/testCases/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def test_case_result_path( + project: str, + location: str, + agent: str, + test_case: str, + result: str, + ) -> str: + """Returns a fully-qualified test_case_result string.""" + return "projects/{project}/locations/{location}/agents/{agent}/testCases/{test_case}/results/{result}".format( + project=project, + location=location, + agent=agent, + test_case=test_case, + result=result, + ) + + @staticmethod + def parse_test_case_result_path(path: str) -> Dict[str, str]: + """Parses a test_case_result path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/testCases/(?P.+?)/results/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transition_route_group_path( + project: str, + location: str, + agent: str, + flow: str, + transition_route_group: str, + ) -> str: + """Returns a fully-qualified transition_route_group string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( + project=project, + location=location, + agent=agent, + flow=flow, + transition_route_group=transition_route_group, + ) + + @staticmethod + def parse_transition_route_group_path(path: str) -> Dict[str, str]: + """Parses a transition_route_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/transitionRouteGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return 
"projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: 
str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TestCasesTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the test cases client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TestCasesTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TestCasesTransport): + # transport is a TestCasesTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_test_cases( + self, + request: Optional[Union[test_case.ListTestCasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTestCasesPager: + r"""Fetches a list of test cases for a given agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListTestCasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_test_cases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListTestCasesRequest, dict]): + The request object. The request message for + [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. + parent (str): + Required. The agent to list all pages for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.test_cases.pagers.ListTestCasesPager: + The response message for + [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a test_case.ListTestCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.ListTestCasesRequest): + request = test_case.ListTestCasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_test_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTestCasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_delete_test_cases( + self, + request: Optional[Union[test_case.BatchDeleteTestCasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Batch deletes test cases. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_batch_delete_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.BatchDeleteTestCasesRequest( + parent="parent_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + client.batch_delete_test_cases(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.BatchDeleteTestCasesRequest, dict]): + The request object. The request message for + [TestCases.BatchDeleteTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchDeleteTestCases]. + parent (str): + Required. The agent to delete test cases from. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a test_case.BatchDeleteTestCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, test_case.BatchDeleteTestCasesRequest): + request = test_case.BatchDeleteTestCasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_delete_test_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_test_case( + self, + request: Optional[Union[test_case.GetTestCaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.TestCase: + r"""Gets a test case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetTestCaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_test_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetTestCaseRequest, dict]): + The request object. 
The request message for + [TestCases.GetTestCase][google.cloud.dialogflow.cx.v3.TestCases.GetTestCase]. + name (str): + Required. The name of the testcase. Format: + ``projects//locations//agents//testCases/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCase: + Represents a test case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a test_case.GetTestCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.GetTestCaseRequest): + request = test_case.GetTestCaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_test_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_test_case( + self, + request: Optional[Union[gcdc_test_case.CreateTestCaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + test_case: Optional[gcdc_test_case.TestCase] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_test_case.TestCase: + r"""Creates a test case for the given agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + test_case = dialogflowcx_v3.TestCase() + test_case.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateTestCaseRequest( + parent="parent_value", + test_case=test_case, + ) + + # Make the request + response = client.create_test_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateTestCaseRequest, dict]): + The request object. The request message for + [TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase]. + parent (str): + Required. The agent to create the test case for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ test_case (google.cloud.dialogflowcx_v3.types.TestCase): + Required. The test case to create. + This corresponds to the ``test_case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCase: + Represents a test case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, test_case]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_test_case.CreateTestCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_test_case.CreateTestCaseRequest): + request = gcdc_test_case.CreateTestCaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if test_case is not None: + request.test_case = test_case + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_test_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_test_case( + self, + request: Optional[Union[gcdc_test_case.UpdateTestCaseRequest, dict]] = None, + *, + test_case: Optional[gcdc_test_case.TestCase] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_test_case.TestCase: + r"""Updates the specified test case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + test_case = dialogflowcx_v3.TestCase() + test_case.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateTestCaseRequest( + test_case=test_case, + ) + + # Make the request + response = client.update_test_case(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateTestCaseRequest, dict]): + The request object. The request message for + [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3.TestCases.UpdateTestCase]. + test_case (google.cloud.dialogflowcx_v3.types.TestCase): + Required. The test case to update. + This corresponds to the ``test_case`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to specify which fields should be + updated. The + [``creationTime``][google.cloud.dialogflow.cx.v3.TestCase.creation_time] + and + [``lastTestResult``][google.cloud.dialogflow.cx.v3.TestCase.last_test_result] + cannot be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCase: + Represents a test case. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([test_case, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_test_case.UpdateTestCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_test_case.UpdateTestCaseRequest): + request = gcdc_test_case.UpdateTestCaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if test_case is not None: + request.test_case = test_case + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_test_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("test_case.name", request.test_case.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_test_case( + self, + request: Optional[Union[test_case.RunTestCaseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Kicks off a test case run. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunTestCaseMetadata][google.cloud.dialogflow.cx.v3.RunTestCaseMetadata] + - ``response``: + [RunTestCaseResponse][google.cloud.dialogflow.cx.v3.RunTestCaseResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_run_test_case(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.RunTestCaseRequest( + name="name_value", + ) + + # Make the request + operation = client.run_test_case(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.RunTestCaseRequest, dict]): + The request object. The request message for + [TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.RunTestCaseResponse` The response message for + [TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a test_case.RunTestCaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.RunTestCaseRequest): + request = test_case.RunTestCaseRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.run_test_case] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + test_case.RunTestCaseResponse, + metadata_type=test_case.RunTestCaseMetadata, + ) + + # Done; return the response. + return response + + def batch_run_test_cases( + self, + request: Optional[Union[test_case.BatchRunTestCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Kicks off a batch run of test cases. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [BatchRunTestCasesMetadata][google.cloud.dialogflow.cx.v3.BatchRunTestCasesMetadata] + - ``response``: + [BatchRunTestCasesResponse][google.cloud.dialogflow.cx.v3.BatchRunTestCasesResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_batch_run_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.BatchRunTestCasesRequest( + parent="parent_value", + test_cases=['test_cases_value1', 'test_cases_value2'], + ) + + # Make the request + operation = client.batch_run_test_cases(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.BatchRunTestCasesRequest, dict]): + The request object. The request message for + [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.BatchRunTestCasesResponse` The response message for + [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a test_case.BatchRunTestCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, test_case.BatchRunTestCasesRequest): + request = test_case.BatchRunTestCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_run_test_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + test_case.BatchRunTestCasesResponse, + metadata_type=test_case.BatchRunTestCasesMetadata, + ) + + # Done; return the response. + return response + + def calculate_coverage( + self, + request: Optional[Union[test_case.CalculateCoverageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.CalculateCoverageResponse: + r"""Calculates the test coverage for an agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_calculate_coverage(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.CalculateCoverageRequest( + agent="agent_value", + type_="TRANSITION_ROUTE_GROUP", + ) + + # Make the request + response = client.calculate_coverage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CalculateCoverageRequest, dict]): + The request object. The request message for + [TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.CalculateCoverageResponse: + The response message for + [TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a test_case.CalculateCoverageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.CalculateCoverageRequest): + request = test_case.CalculateCoverageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.calculate_coverage] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("agent", request.agent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def import_test_cases( + self, + request: Optional[Union[test_case.ImportTestCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the test cases from a Cloud Storage bucket or a local + file. It always creates new test cases and won't overwrite any + existing ones. The provided ID in the imported test case is + neglected. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ImportTestCasesMetadata] + - ``response``: + [ImportTestCasesResponse][google.cloud.dialogflow.cx.v3.ImportTestCasesResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_import_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportTestCasesRequest( + gcs_uri="gcs_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_test_cases(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ImportTestCasesRequest, dict]): + The request object. The request message for + [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ImportTestCasesResponse` The response message for + [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a test_case.ImportTestCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.ImportTestCasesRequest): + request = test_case.ImportTestCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.import_test_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + test_case.ImportTestCasesResponse, + metadata_type=test_case.ImportTestCasesMetadata, + ) + + # Done; return the response. + return response + + def export_test_cases( + self, + request: Optional[Union[test_case.ExportTestCasesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports the test cases under the agent to a Cloud Storage bucket + or a local file. Filter can be applied to export a subset of + test cases. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ExportTestCasesMetadata] + - ``response``: + [ExportTestCasesResponse][google.cloud.dialogflow.cx.v3.ExportTestCasesResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_export_test_cases(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportTestCasesRequest( + gcs_uri="gcs_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.export_test_cases(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ExportTestCasesRequest, dict]): + The request object. The request message for + [TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportTestCasesResponse` The response message for + [TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a test_case.ExportTestCasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.ExportTestCasesRequest): + request = test_case.ExportTestCasesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.export_test_cases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + test_case.ExportTestCasesResponse, + metadata_type=test_case.ExportTestCasesMetadata, + ) + + # Done; return the response. + return response + + def list_test_case_results( + self, + request: Optional[Union[test_case.ListTestCaseResultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTestCaseResultsPager: + r"""Fetches the list of run results for the given test + case. A maximum of 100 results are kept for each test + case. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_test_case_results(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListTestCaseResultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_test_case_results(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListTestCaseResultsRequest, dict]): + The request object. The request message for + [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. + parent (str): + Required. The test case to list results for. Format: + ``projects//locations//agents// testCases/``. + Specify a ``-`` as a wildcard for TestCase ID to list + results across multiple test cases. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.test_cases.pagers.ListTestCaseResultsPager: + The response message for + [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a test_case.ListTestCaseResultsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.ListTestCaseResultsRequest): + request = test_case.ListTestCaseResultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_test_case_results] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTestCaseResultsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_test_case_result( + self, + request: Optional[Union[test_case.GetTestCaseResultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.TestCaseResult: + r"""Gets a test case result. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_test_case_result(): + # Create a client + client = dialogflowcx_v3.TestCasesClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetTestCaseResultRequest( + name="name_value", + ) + + # Make the request + response = client.get_test_case_result(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetTestCaseResultRequest, dict]): + The request object. The request message for + [TestCases.GetTestCaseResult][google.cloud.dialogflow.cx.v3.TestCases.GetTestCaseResult]. + name (str): + Required. The name of the testcase. Format: + ``projects//locations//agents//testCases//results/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TestCaseResult: + Represents a result from running a + test case in an agent environment. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a test_case.GetTestCaseResultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, test_case.GetTestCaseResultRequest): + request = test_case.GetTestCaseResultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_test_case_result] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TestCasesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TestCasesClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/pagers.py new file mode 100644 index 000000000000..999b149cdebf --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import test_case + + +class ListTestCasesPager: + """A pager for iterating through ``list_test_cases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListTestCasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``test_cases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTestCases`` requests and continue to iterate + through the ``test_cases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListTestCasesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., test_case.ListTestCasesResponse], + request: test_case.ListTestCasesRequest, + response: test_case.ListTestCasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListTestCasesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListTestCasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = test_case.ListTestCasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[test_case.ListTestCasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[test_case.TestCase]: + for page in self.pages: + yield from page.test_cases + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTestCasesAsyncPager: + """A pager for iterating through ``list_test_cases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListTestCasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``test_cases`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListTestCases`` requests and continue to iterate + through the ``test_cases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListTestCasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[test_case.ListTestCasesResponse]], + request: test_case.ListTestCasesRequest, + response: test_case.ListTestCasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListTestCasesRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListTestCasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = test_case.ListTestCasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[test_case.ListTestCasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[test_case.TestCase]: + async def async_generator(): + async for page in self.pages: + for response in page.test_cases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTestCaseResultsPager: + """A pager for iterating through ``list_test_case_results`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListTestCaseResultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``test_case_results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTestCaseResults`` requests and continue to iterate + through the ``test_case_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListTestCaseResultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., test_case.ListTestCaseResultsResponse], + request: test_case.ListTestCaseResultsRequest, + response: test_case.ListTestCaseResultsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListTestCaseResultsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListTestCaseResultsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = test_case.ListTestCaseResultsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[test_case.ListTestCaseResultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[test_case.TestCaseResult]: + for page in self.pages: + yield from page.test_case_results + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTestCaseResultsAsyncPager: + """A pager for iterating through ``list_test_case_results`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListTestCaseResultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``test_case_results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTestCaseResults`` requests and continue to iterate + through the ``test_case_results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListTestCaseResultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[test_case.ListTestCaseResultsResponse]], + request: test_case.ListTestCaseResultsRequest, + response: test_case.ListTestCaseResultsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListTestCaseResultsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListTestCaseResultsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = test_case.ListTestCaseResultsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[test_case.ListTestCaseResultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[test_case.TestCaseResult]: + async def async_generator(): + async for page in self.pages: + for response in page.test_case_results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/__init__.py new file mode 100644 index 000000000000..8ae8b75ff338 --- /dev/null +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TestCasesTransport +from .grpc import TestCasesGrpcTransport +from .grpc_asyncio import TestCasesGrpcAsyncIOTransport +from .rest import TestCasesRestInterceptor, TestCasesRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[TestCasesTransport]] +_transport_registry["grpc"] = TestCasesGrpcTransport +_transport_registry["grpc_asyncio"] = TestCasesGrpcAsyncIOTransport +_transport_registry["rest"] = TestCasesRestTransport + +__all__ = ( + "TestCasesTransport", + "TestCasesGrpcTransport", + "TestCasesGrpcAsyncIOTransport", + "TestCasesRestTransport", + "TestCasesRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/base.py new file mode 100644 index 000000000000..e2464bb2cabd --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/base.py @@ -0,0 +1,377 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import test_case +from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TestCasesTransport(abc.ABC): + """Abstract transport class for TestCases.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_test_cases: gapic_v1.method.wrap_method( + self.list_test_cases, + default_timeout=None, + client_info=client_info, + ), + self.batch_delete_test_cases: gapic_v1.method.wrap_method( + self.batch_delete_test_cases, + default_timeout=None, + client_info=client_info, + ), + self.get_test_case: gapic_v1.method.wrap_method( + self.get_test_case, + default_timeout=None, + client_info=client_info, + ), + self.create_test_case: gapic_v1.method.wrap_method( + self.create_test_case, + default_timeout=None, + client_info=client_info, + ), + self.update_test_case: gapic_v1.method.wrap_method( + self.update_test_case, + default_timeout=None, + client_info=client_info, + ), + self.run_test_case: gapic_v1.method.wrap_method( + self.run_test_case, + default_timeout=None, + client_info=client_info, + ), + self.batch_run_test_cases: gapic_v1.method.wrap_method( + self.batch_run_test_cases, + default_timeout=None, + client_info=client_info, + ), + self.calculate_coverage: gapic_v1.method.wrap_method( + self.calculate_coverage, + default_timeout=None, + client_info=client_info, + ), + self.import_test_cases: gapic_v1.method.wrap_method( + self.import_test_cases, + default_timeout=None, + client_info=client_info, + ), + self.export_test_cases: gapic_v1.method.wrap_method( + self.export_test_cases, + default_timeout=None, + client_info=client_info, + ), + self.list_test_case_results: gapic_v1.method.wrap_method( + self.list_test_case_results, + default_timeout=None, + client_info=client_info, + ), + self.get_test_case_result: gapic_v1.method.wrap_method( + self.get_test_case_result, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated 
with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_test_cases( + self, + ) -> Callable[ + [test_case.ListTestCasesRequest], + Union[ + test_case.ListTestCasesResponse, Awaitable[test_case.ListTestCasesResponse] + ], + ]: + raise NotImplementedError() + + @property + def batch_delete_test_cases( + self, + ) -> Callable[ + [test_case.BatchDeleteTestCasesRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_test_case( + self, + ) -> Callable[ + [test_case.GetTestCaseRequest], + Union[test_case.TestCase, Awaitable[test_case.TestCase]], + ]: + raise NotImplementedError() + + @property + def create_test_case( + self, + ) -> Callable[ + [gcdc_test_case.CreateTestCaseRequest], + Union[gcdc_test_case.TestCase, Awaitable[gcdc_test_case.TestCase]], + ]: + raise NotImplementedError() + + @property + def update_test_case( + self, + ) -> Callable[ + [gcdc_test_case.UpdateTestCaseRequest], + Union[gcdc_test_case.TestCase, Awaitable[gcdc_test_case.TestCase]], + ]: + raise NotImplementedError() + + @property + def run_test_case( + self, + ) -> Callable[ + [test_case.RunTestCaseRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def batch_run_test_cases( + self, + ) -> Callable[ + [test_case.BatchRunTestCasesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def calculate_coverage( + self, + ) -> Callable[ + [test_case.CalculateCoverageRequest], + Union[ + test_case.CalculateCoverageResponse, + Awaitable[test_case.CalculateCoverageResponse], + ], + 
]: + raise NotImplementedError() + + @property + def import_test_cases( + self, + ) -> Callable[ + [test_case.ImportTestCasesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_test_cases( + self, + ) -> Callable[ + [test_case.ExportTestCasesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_test_case_results( + self, + ) -> Callable[ + [test_case.ListTestCaseResultsRequest], + Union[ + test_case.ListTestCaseResultsResponse, + Awaitable[test_case.ListTestCaseResultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_test_case_result( + self, + ) -> Callable[ + [test_case.GetTestCaseResultRequest], + Union[test_case.TestCaseResult, Awaitable[test_case.TestCaseResult]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + +__all__ = ("TestCasesTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/grpc.py new file mode 100644 index 000000000000..61f6181915e8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/grpc.py @@ -0,0 +1,712 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import test_case +from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case + +from .base import DEFAULT_CLIENT_INFO, TestCasesTransport + + +class TestCasesGrpcTransport(TestCasesTransport): + """gRPC backend transport for TestCases. 
+ + Service for managing [Test + Cases][google.cloud.dialogflow.cx.v3.TestCase] and [Test Case + Results][google.cloud.dialogflow.cx.v3.TestCaseResult]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_test_cases( + self, + ) -> Callable[[test_case.ListTestCasesRequest], test_case.ListTestCasesResponse]: + r"""Return a callable for the list test cases method over gRPC. + + Fetches a list of test cases for a given agent. + + Returns: + Callable[[~.ListTestCasesRequest], + ~.ListTestCasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_test_cases" not in self._stubs: + self._stubs["list_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ListTestCases", + request_serializer=test_case.ListTestCasesRequest.serialize, + response_deserializer=test_case.ListTestCasesResponse.deserialize, + ) + return self._stubs["list_test_cases"] + + @property + def batch_delete_test_cases( + self, + ) -> Callable[[test_case.BatchDeleteTestCasesRequest], empty_pb2.Empty]: + r"""Return a callable for the batch delete test cases method over gRPC. + + Batch deletes test cases. + + Returns: + Callable[[~.BatchDeleteTestCasesRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_delete_test_cases" not in self._stubs: + self._stubs["batch_delete_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/BatchDeleteTestCases", + request_serializer=test_case.BatchDeleteTestCasesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_test_cases"] + + @property + def get_test_case( + self, + ) -> Callable[[test_case.GetTestCaseRequest], test_case.TestCase]: + r"""Return a callable for the get test case method over gRPC. + + Gets a test case. + + Returns: + Callable[[~.GetTestCaseRequest], + ~.TestCase]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_test_case" not in self._stubs: + self._stubs["get_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/GetTestCase", + request_serializer=test_case.GetTestCaseRequest.serialize, + response_deserializer=test_case.TestCase.deserialize, + ) + return self._stubs["get_test_case"] + + @property + def create_test_case( + self, + ) -> Callable[[gcdc_test_case.CreateTestCaseRequest], gcdc_test_case.TestCase]: + r"""Return a callable for the create test case method over gRPC. + + Creates a test case for the given agent. + + Returns: + Callable[[~.CreateTestCaseRequest], + ~.TestCase]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_test_case" not in self._stubs: + self._stubs["create_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/CreateTestCase", + request_serializer=gcdc_test_case.CreateTestCaseRequest.serialize, + response_deserializer=gcdc_test_case.TestCase.deserialize, + ) + return self._stubs["create_test_case"] + + @property + def update_test_case( + self, + ) -> Callable[[gcdc_test_case.UpdateTestCaseRequest], gcdc_test_case.TestCase]: + r"""Return a callable for the update test case method over gRPC. + + Updates the specified test case. + + Returns: + Callable[[~.UpdateTestCaseRequest], + ~.TestCase]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_test_case" not in self._stubs: + self._stubs["update_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/UpdateTestCase", + request_serializer=gcdc_test_case.UpdateTestCaseRequest.serialize, + response_deserializer=gcdc_test_case.TestCase.deserialize, + ) + return self._stubs["update_test_case"] + + @property + def run_test_case( + self, + ) -> Callable[[test_case.RunTestCaseRequest], operations_pb2.Operation]: + r"""Return a callable for the run test case method over gRPC. + + Kicks off a test case run. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunTestCaseMetadata][google.cloud.dialogflow.cx.v3.RunTestCaseMetadata] + - ``response``: + [RunTestCaseResponse][google.cloud.dialogflow.cx.v3.RunTestCaseResponse] + + Returns: + Callable[[~.RunTestCaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_test_case" not in self._stubs: + self._stubs["run_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/RunTestCase", + request_serializer=test_case.RunTestCaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["run_test_case"] + + @property + def batch_run_test_cases( + self, + ) -> Callable[[test_case.BatchRunTestCasesRequest], operations_pb2.Operation]: + r"""Return a callable for the batch run test cases method over gRPC. + + Kicks off a batch run of test cases. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [BatchRunTestCasesMetadata][google.cloud.dialogflow.cx.v3.BatchRunTestCasesMetadata] + - ``response``: + [BatchRunTestCasesResponse][google.cloud.dialogflow.cx.v3.BatchRunTestCasesResponse] + + Returns: + Callable[[~.BatchRunTestCasesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_run_test_cases" not in self._stubs: + self._stubs["batch_run_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/BatchRunTestCases", + request_serializer=test_case.BatchRunTestCasesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_run_test_cases"] + + @property + def calculate_coverage( + self, + ) -> Callable[ + [test_case.CalculateCoverageRequest], test_case.CalculateCoverageResponse + ]: + r"""Return a callable for the calculate coverage method over gRPC. + + Calculates the test coverage for an agent. + + Returns: + Callable[[~.CalculateCoverageRequest], + ~.CalculateCoverageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "calculate_coverage" not in self._stubs: + self._stubs["calculate_coverage"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/CalculateCoverage", + request_serializer=test_case.CalculateCoverageRequest.serialize, + response_deserializer=test_case.CalculateCoverageResponse.deserialize, + ) + return self._stubs["calculate_coverage"] + + @property + def import_test_cases( + self, + ) -> Callable[[test_case.ImportTestCasesRequest], operations_pb2.Operation]: + r"""Return a callable for the import test cases method over gRPC. + + Imports the test cases from a Cloud Storage bucket or a local + file. It always creates new test cases and won't overwrite any + existing ones. The provided ID in the imported test case is + neglected. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ImportTestCasesMetadata] + - ``response``: + [ImportTestCasesResponse][google.cloud.dialogflow.cx.v3.ImportTestCasesResponse] + + Returns: + Callable[[~.ImportTestCasesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_test_cases" not in self._stubs: + self._stubs["import_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ImportTestCases", + request_serializer=test_case.ImportTestCasesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_test_cases"] + + @property + def export_test_cases( + self, + ) -> Callable[[test_case.ExportTestCasesRequest], operations_pb2.Operation]: + r"""Return a callable for the export test cases method over gRPC. 
+ + Exports the test cases under the agent to a Cloud Storage bucket + or a local file. Filter can be applied to export a subset of + test cases. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ExportTestCasesMetadata] + - ``response``: + [ExportTestCasesResponse][google.cloud.dialogflow.cx.v3.ExportTestCasesResponse] + + Returns: + Callable[[~.ExportTestCasesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_test_cases" not in self._stubs: + self._stubs["export_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ExportTestCases", + request_serializer=test_case.ExportTestCasesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_test_cases"] + + @property + def list_test_case_results( + self, + ) -> Callable[ + [test_case.ListTestCaseResultsRequest], test_case.ListTestCaseResultsResponse + ]: + r"""Return a callable for the list test case results method over gRPC. + + Fetches the list of run results for the given test + case. A maximum of 100 results are kept for each test + case. + + Returns: + Callable[[~.ListTestCaseResultsRequest], + ~.ListTestCaseResultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_test_case_results" not in self._stubs: + self._stubs["list_test_case_results"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ListTestCaseResults", + request_serializer=test_case.ListTestCaseResultsRequest.serialize, + response_deserializer=test_case.ListTestCaseResultsResponse.deserialize, + ) + return self._stubs["list_test_case_results"] + + @property + def get_test_case_result( + self, + ) -> Callable[[test_case.GetTestCaseResultRequest], test_case.TestCaseResult]: + r"""Return a callable for the get test case result method over gRPC. + + Gets a test case result. + + Returns: + Callable[[~.GetTestCaseResultRequest], + ~.TestCaseResult]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_test_case_result" not in self._stubs: + self._stubs["get_test_case_result"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/GetTestCaseResult", + request_serializer=test_case.GetTestCaseResultRequest.serialize, + response_deserializer=test_case.TestCaseResult.deserialize, + ) + return self._stubs["get_test_case_result"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TestCasesGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/grpc_asyncio.py new file mode 100644 index 000000000000..31ec81d32bf4 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/grpc_asyncio.py @@ -0,0 +1,729 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import test_case +from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case + +from .base import DEFAULT_CLIENT_INFO, TestCasesTransport +from .grpc import TestCasesGrpcTransport + + +class TestCasesGrpcAsyncIOTransport(TestCasesTransport): + """gRPC AsyncIO backend transport for TestCases. + + Service for managing [Test + Cases][google.cloud.dialogflow.cx.v3.TestCase] and [Test Case + Results][google.cloud.dialogflow.cx.v3.TestCaseResult]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_test_cases( + self, + ) -> Callable[ + [test_case.ListTestCasesRequest], Awaitable[test_case.ListTestCasesResponse] + ]: + r"""Return a callable for the list test cases method over gRPC. + + Fetches a list of test cases for a given agent. + + Returns: + Callable[[~.ListTestCasesRequest], + Awaitable[~.ListTestCasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_test_cases" not in self._stubs: + self._stubs["list_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ListTestCases", + request_serializer=test_case.ListTestCasesRequest.serialize, + response_deserializer=test_case.ListTestCasesResponse.deserialize, + ) + return self._stubs["list_test_cases"] + + @property + def batch_delete_test_cases( + self, + ) -> Callable[[test_case.BatchDeleteTestCasesRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the batch delete test cases method over gRPC. + + Batch deletes test cases. + + Returns: + Callable[[~.BatchDeleteTestCasesRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_delete_test_cases" not in self._stubs: + self._stubs["batch_delete_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/BatchDeleteTestCases", + request_serializer=test_case.BatchDeleteTestCasesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_test_cases"] + + @property + def get_test_case( + self, + ) -> Callable[[test_case.GetTestCaseRequest], Awaitable[test_case.TestCase]]: + r"""Return a callable for the get test case method over gRPC. + + Gets a test case. + + Returns: + Callable[[~.GetTestCaseRequest], + Awaitable[~.TestCase]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_test_case" not in self._stubs: + self._stubs["get_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/GetTestCase", + request_serializer=test_case.GetTestCaseRequest.serialize, + response_deserializer=test_case.TestCase.deserialize, + ) + return self._stubs["get_test_case"] + + @property + def create_test_case( + self, + ) -> Callable[ + [gcdc_test_case.CreateTestCaseRequest], Awaitable[gcdc_test_case.TestCase] + ]: + r"""Return a callable for the create test case method over gRPC. + + Creates a test case for the given agent. + + Returns: + Callable[[~.CreateTestCaseRequest], + Awaitable[~.TestCase]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_test_case" not in self._stubs: + self._stubs["create_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/CreateTestCase", + request_serializer=gcdc_test_case.CreateTestCaseRequest.serialize, + response_deserializer=gcdc_test_case.TestCase.deserialize, + ) + return self._stubs["create_test_case"] + + @property + def update_test_case( + self, + ) -> Callable[ + [gcdc_test_case.UpdateTestCaseRequest], Awaitable[gcdc_test_case.TestCase] + ]: + r"""Return a callable for the update test case method over gRPC. + + Updates the specified test case. + + Returns: + Callable[[~.UpdateTestCaseRequest], + Awaitable[~.TestCase]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_test_case" not in self._stubs: + self._stubs["update_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/UpdateTestCase", + request_serializer=gcdc_test_case.UpdateTestCaseRequest.serialize, + response_deserializer=gcdc_test_case.TestCase.deserialize, + ) + return self._stubs["update_test_case"] + + @property + def run_test_case( + self, + ) -> Callable[[test_case.RunTestCaseRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the run test case method over gRPC. + + Kicks off a test case run. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [RunTestCaseMetadata][google.cloud.dialogflow.cx.v3.RunTestCaseMetadata] + - ``response``: + [RunTestCaseResponse][google.cloud.dialogflow.cx.v3.RunTestCaseResponse] + + Returns: + Callable[[~.RunTestCaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_test_case" not in self._stubs: + self._stubs["run_test_case"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/RunTestCase", + request_serializer=test_case.RunTestCaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["run_test_case"] + + @property + def batch_run_test_cases( + self, + ) -> Callable[ + [test_case.BatchRunTestCasesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the batch run test cases method over gRPC. + + Kicks off a batch run of test cases. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [BatchRunTestCasesMetadata][google.cloud.dialogflow.cx.v3.BatchRunTestCasesMetadata] + - ``response``: + [BatchRunTestCasesResponse][google.cloud.dialogflow.cx.v3.BatchRunTestCasesResponse] + + Returns: + Callable[[~.BatchRunTestCasesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_run_test_cases" not in self._stubs: + self._stubs["batch_run_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/BatchRunTestCases", + request_serializer=test_case.BatchRunTestCasesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["batch_run_test_cases"] + + @property + def calculate_coverage( + self, + ) -> Callable[ + [test_case.CalculateCoverageRequest], + Awaitable[test_case.CalculateCoverageResponse], + ]: + r"""Return a callable for the calculate coverage method over gRPC. + + Calculates the test coverage for an agent. + + Returns: + Callable[[~.CalculateCoverageRequest], + Awaitable[~.CalculateCoverageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "calculate_coverage" not in self._stubs: + self._stubs["calculate_coverage"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/CalculateCoverage", + request_serializer=test_case.CalculateCoverageRequest.serialize, + response_deserializer=test_case.CalculateCoverageResponse.deserialize, + ) + return self._stubs["calculate_coverage"] + + @property + def import_test_cases( + self, + ) -> Callable[ + [test_case.ImportTestCasesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import test cases method over gRPC. + + Imports the test cases from a Cloud Storage bucket or a local + file. It always creates new test cases and won't overwrite any + existing ones. The provided ID in the imported test case is + neglected. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ImportTestCasesMetadata] + - ``response``: + [ImportTestCasesResponse][google.cloud.dialogflow.cx.v3.ImportTestCasesResponse] + + Returns: + Callable[[~.ImportTestCasesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_test_cases" not in self._stubs: + self._stubs["import_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ImportTestCases", + request_serializer=test_case.ImportTestCasesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_test_cases"] + + @property + def export_test_cases( + self, + ) -> Callable[ + [test_case.ExportTestCasesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the export test cases method over gRPC. + + Exports the test cases under the agent to a Cloud Storage bucket + or a local file. Filter can be applied to export a subset of + test cases. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportTestCasesMetadata][google.cloud.dialogflow.cx.v3.ExportTestCasesMetadata] + - ``response``: + [ExportTestCasesResponse][google.cloud.dialogflow.cx.v3.ExportTestCasesResponse] + + Returns: + Callable[[~.ExportTestCasesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_test_cases" not in self._stubs: + self._stubs["export_test_cases"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ExportTestCases", + request_serializer=test_case.ExportTestCasesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_test_cases"] + + @property + def list_test_case_results( + self, + ) -> Callable[ + [test_case.ListTestCaseResultsRequest], + Awaitable[test_case.ListTestCaseResultsResponse], + ]: + r"""Return a callable for the list test case results method over gRPC. + + Fetches the list of run results for the given test + case. A maximum of 100 results are kept for each test + case. + + Returns: + Callable[[~.ListTestCaseResultsRequest], + Awaitable[~.ListTestCaseResultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_test_case_results" not in self._stubs: + self._stubs["list_test_case_results"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/ListTestCaseResults", + request_serializer=test_case.ListTestCaseResultsRequest.serialize, + response_deserializer=test_case.ListTestCaseResultsResponse.deserialize, + ) + return self._stubs["list_test_case_results"] + + @property + def get_test_case_result( + self, + ) -> Callable[ + [test_case.GetTestCaseResultRequest], Awaitable[test_case.TestCaseResult] + ]: + r"""Return a callable for the get test case result method over gRPC. + + Gets a test case result. + + Returns: + Callable[[~.GetTestCaseResultRequest], + Awaitable[~.TestCaseResult]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_test_case_result" not in self._stubs: + self._stubs["get_test_case_result"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TestCases/GetTestCaseResult", + request_serializer=test_case.GetTestCaseResultRequest.serialize, + response_deserializer=test_case.TestCaseResult.deserialize, + ) + return self._stubs["get_test_case_result"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("TestCasesGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py new file mode 100644 index 000000000000..070569037a12 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py @@ -0,0 +1,2285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import test_case +from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TestCasesTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TestCasesRestInterceptor: + """Interceptor for TestCases. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TestCasesRestTransport. + + .. code-block:: python + class MyCustomTestCasesInterceptor(TestCasesRestInterceptor): + def pre_batch_delete_test_cases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_batch_run_test_cases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_run_test_cases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_calculate_coverage(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_calculate_coverage(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_test_case(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_test_case(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_export_test_cases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_test_cases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_test_case(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_test_case(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_test_case_result(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_get_test_case_result(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_import_test_cases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_test_cases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_test_case_results(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_test_case_results(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_test_cases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_test_cases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_test_case(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_test_case(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_test_case(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_test_case(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TestCasesRestTransport(interceptor=MyCustomTestCasesInterceptor()) + client = TestCasesClient(transport=transport) + + + """ + + def pre_batch_delete_test_cases( + self, + request: test_case.BatchDeleteTestCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.BatchDeleteTestCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_delete_test_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. 
+ """ + return request, metadata + + def pre_batch_run_test_cases( + self, + request: test_case.BatchRunTestCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.BatchRunTestCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_run_test_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_batch_run_test_cases( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for batch_run_test_cases + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_calculate_coverage( + self, + request: test_case.CalculateCoverageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.CalculateCoverageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for calculate_coverage + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_calculate_coverage( + self, response: test_case.CalculateCoverageResponse + ) -> test_case.CalculateCoverageResponse: + """Post-rpc interceptor for calculate_coverage + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_create_test_case( + self, + request: gcdc_test_case.CreateTestCaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_test_case.CreateTestCaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_test_case + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. 
+ """ + return request, metadata + + def post_create_test_case( + self, response: gcdc_test_case.TestCase + ) -> gcdc_test_case.TestCase: + """Post-rpc interceptor for create_test_case + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_export_test_cases( + self, + request: test_case.ExportTestCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.ExportTestCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_test_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_export_test_cases( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_test_cases + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_get_test_case( + self, request: test_case.GetTestCaseRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[test_case.GetTestCaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_test_case + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_get_test_case(self, response: test_case.TestCase) -> test_case.TestCase: + """Post-rpc interceptor for get_test_case + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. 
+ """ + return response + + def pre_get_test_case_result( + self, + request: test_case.GetTestCaseResultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.GetTestCaseResultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_test_case_result + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_get_test_case_result( + self, response: test_case.TestCaseResult + ) -> test_case.TestCaseResult: + """Post-rpc interceptor for get_test_case_result + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_import_test_cases( + self, + request: test_case.ImportTestCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.ImportTestCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_test_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_import_test_cases( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_test_cases + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_list_test_case_results( + self, + request: test_case.ListTestCaseResultsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.ListTestCaseResultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_test_case_results + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. 
+ """ + return request, metadata + + def post_list_test_case_results( + self, response: test_case.ListTestCaseResultsResponse + ) -> test_case.ListTestCaseResultsResponse: + """Post-rpc interceptor for list_test_case_results + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_list_test_cases( + self, + request: test_case.ListTestCasesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[test_case.ListTestCasesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_test_cases + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_list_test_cases( + self, response: test_case.ListTestCasesResponse + ) -> test_case.ListTestCasesResponse: + """Post-rpc interceptor for list_test_cases + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_run_test_case( + self, request: test_case.RunTestCaseRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[test_case.RunTestCaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_test_case + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_run_test_case( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for run_test_case + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. 
+ """ + return response + + def pre_update_test_case( + self, + request: gcdc_test_case.UpdateTestCaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_test_case.UpdateTestCaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_test_case + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_update_test_case( + self, response: gcdc_test_case.TestCase + ) -> gcdc_test_case.TestCase: + """Post-rpc interceptor for update_test_case + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TestCases server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TestCases server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TestCasesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TestCasesRestInterceptor + + +class TestCasesRestTransport(TestCasesTransport): + """REST backend transport for TestCases. + + Service for managing [Test + Cases][google.cloud.dialogflow.cx.v3.TestCase] and [Test Case + Results][google.cloud.dialogflow.cx.v3.TestCaseResult]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TestCasesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TestCasesRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _BatchDeleteTestCases(TestCasesRestStub): + def __hash__(self): + return hash("BatchDeleteTestCases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.BatchDeleteTestCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the batch delete test cases method over HTTP. + + Args: + request (~.test_case.BatchDeleteTestCasesRequest): + The request object. The request message for + [TestCases.BatchDeleteTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchDeleteTestCases]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/testCases:batchDelete", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_delete_test_cases( + request, metadata + ) + pb_request = test_case.BatchDeleteTestCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _BatchRunTestCases(TestCasesRestStub): + def __hash__(self): + return hash("BatchRunTestCases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.BatchRunTestCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the batch run test cases method over HTTP. + + Args: + request (~.test_case.BatchRunTestCasesRequest): + The request object. The request message for + [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3.TestCases.BatchRunTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/testCases:batchRun", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_run_test_cases( + request, metadata + ) + pb_request = test_case.BatchRunTestCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_run_test_cases(resp) + return resp + + class _CalculateCoverage(TestCasesRestStub): + def __hash__(self): + return hash("CalculateCoverage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "type": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.CalculateCoverageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.CalculateCoverageResponse: + r"""Call the calculate coverage method over HTTP. + + Args: + request (~.test_case.CalculateCoverageRequest): + The request object. The request message for + [TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.test_case.CalculateCoverageResponse: + The response message for + [TestCases.CalculateCoverage][google.cloud.dialogflow.cx.v3.TestCases.CalculateCoverage]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{agent=projects/*/locations/*/agents/*}/testCases:calculateCoverage", + }, + ] + request, metadata = self._interceptor.pre_calculate_coverage( + request, metadata + ) + pb_request = test_case.CalculateCoverageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = test_case.CalculateCoverageResponse() + pb_resp = test_case.CalculateCoverageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_calculate_coverage(resp) + return resp + + class _CreateTestCase(TestCasesRestStub): + def __hash__(self): + return hash("CreateTestCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_test_case.CreateTestCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_test_case.TestCase: + r"""Call the create test case method over HTTP. + + Args: + request (~.gcdc_test_case.CreateTestCaseRequest): + The request object. The request message for + [TestCases.CreateTestCase][google.cloud.dialogflow.cx.v3.TestCases.CreateTestCase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_test_case.TestCase: + Represents a test case. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/testCases", + "body": "test_case", + }, + ] + request, metadata = self._interceptor.pre_create_test_case( + request, metadata + ) + pb_request = gcdc_test_case.CreateTestCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_test_case.TestCase() + pb_resp = gcdc_test_case.TestCase.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_test_case(resp) + return resp + + class _ExportTestCases(TestCasesRestStub): + def __hash__(self): + return hash("ExportTestCases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.ExportTestCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export test cases method over HTTP. + + Args: + request (~.test_case.ExportTestCasesRequest): + The request object. The request message for + [TestCases.ExportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ExportTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/testCases:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_test_cases( + request, metadata + ) + pb_request = test_case.ExportTestCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_test_cases(resp) + return resp + + class _GetTestCase(TestCasesRestStub): + def __hash__(self): + return hash("GetTestCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.GetTestCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.TestCase: + r"""Call the get test case method over HTTP. + + Args: + request (~.test_case.GetTestCaseRequest): + The request object. The request message for + [TestCases.GetTestCase][google.cloud.dialogflow.cx.v3.TestCases.GetTestCase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.test_case.TestCase: + Represents a test case. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/testCases/*}", + }, + ] + request, metadata = self._interceptor.pre_get_test_case(request, metadata) + pb_request = test_case.GetTestCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = test_case.TestCase() + pb_resp = test_case.TestCase.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_test_case(resp) + return resp + + class _GetTestCaseResult(TestCasesRestStub): + def __hash__(self): + return hash("GetTestCaseResult") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.GetTestCaseResultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.TestCaseResult: + r"""Call the get test case result method over HTTP. + + Args: + request (~.test_case.GetTestCaseResultRequest): + The request object. The request message for + [TestCases.GetTestCaseResult][google.cloud.dialogflow.cx.v3.TestCases.GetTestCaseResult]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.test_case.TestCaseResult: + Represents a result from running a + test case in an agent environment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/testCases/*/results/*}", + }, + ] + request, metadata = self._interceptor.pre_get_test_case_result( + request, metadata + ) + pb_request = test_case.GetTestCaseResultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = test_case.TestCaseResult() + pb_resp = test_case.TestCaseResult.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_test_case_result(resp) + return resp + + class _ImportTestCases(TestCasesRestStub): + def __hash__(self): + return hash("ImportTestCases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.ImportTestCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import test cases method over HTTP. + + Args: + request (~.test_case.ImportTestCasesRequest): + The request object. The request message for + [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/testCases:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_test_cases( + request, metadata + ) + pb_request = test_case.ImportTestCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_test_cases(resp) + return resp + + class _ListTestCaseResults(TestCasesRestStub): + def __hash__(self): + return hash("ListTestCaseResults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.ListTestCaseResultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.ListTestCaseResultsResponse: + r"""Call the list test case results method over HTTP. + + Args: + request (~.test_case.ListTestCaseResultsRequest): + The request object. The request message for + [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.test_case.ListTestCaseResultsResponse: + The response message for + [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/testCases/*}/results", + }, + ] + request, metadata = self._interceptor.pre_list_test_case_results( + request, metadata + ) + pb_request = test_case.ListTestCaseResultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = test_case.ListTestCaseResultsResponse() + pb_resp = test_case.ListTestCaseResultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_test_case_results(resp) + return resp + + class _ListTestCases(TestCasesRestStub): + def __hash__(self): + return hash("ListTestCases") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.ListTestCasesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> test_case.ListTestCasesResponse: + r"""Call the list test cases method over HTTP. + + Args: + request (~.test_case.ListTestCasesRequest): + The request object. The request message for + [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.test_case.ListTestCasesResponse: + The response message for + [TestCases.ListTestCases][google.cloud.dialogflow.cx.v3.TestCases.ListTestCases]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/testCases", + }, + ] + request, metadata = self._interceptor.pre_list_test_cases(request, metadata) + pb_request = test_case.ListTestCasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = test_case.ListTestCasesResponse() + pb_resp = test_case.ListTestCasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_test_cases(resp) + return resp + + class _RunTestCase(TestCasesRestStub): + def __hash__(self): + return hash("RunTestCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: test_case.RunTestCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the run test case method over HTTP. + + Args: + request (~.test_case.RunTestCaseRequest): + The request object. The request message for + [TestCases.RunTestCase][google.cloud.dialogflow.cx.v3.TestCases.RunTestCase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/testCases/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_test_case(request, metadata) + pb_request = test_case.RunTestCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_test_case(resp) + return resp + + class _UpdateTestCase(TestCasesRestStub): + def __hash__(self): + return hash("UpdateTestCase") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_test_case.UpdateTestCaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_test_case.TestCase: + r"""Call the update test case method over HTTP. + + Args: + request (~.gcdc_test_case.UpdateTestCaseRequest): + The request object. The request message for + [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3.TestCases.UpdateTestCase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_test_case.TestCase: + Represents a test case. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{test_case.name=projects/*/locations/*/agents/*/testCases/*}", + "body": "test_case", + }, + ] + request, metadata = self._interceptor.pre_update_test_case( + request, metadata + ) + pb_request = gcdc_test_case.UpdateTestCaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_test_case.TestCase() + pb_resp = gcdc_test_case.TestCase.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_test_case(resp) + return resp + + @property + def batch_delete_test_cases( + self, + ) -> Callable[[test_case.BatchDeleteTestCasesRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchDeleteTestCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_run_test_cases( + self, + ) -> Callable[[test_case.BatchRunTestCasesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchRunTestCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def calculate_coverage( + self, + ) -> Callable[ + [test_case.CalculateCoverageRequest], test_case.CalculateCoverageResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CalculateCoverage(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_test_case( + self, + ) -> Callable[[gcdc_test_case.CreateTestCaseRequest], gcdc_test_case.TestCase]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTestCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_test_cases( + self, + ) -> Callable[[test_case.ExportTestCasesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportTestCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_test_case( + self, + ) -> Callable[[test_case.GetTestCaseRequest], test_case.TestCase]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetTestCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_test_case_result( + self, + ) -> Callable[[test_case.GetTestCaseResultRequest], test_case.TestCaseResult]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTestCaseResult(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_test_cases( + self, + ) -> Callable[[test_case.ImportTestCasesRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportTestCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_test_case_results( + self, + ) -> Callable[ + [test_case.ListTestCaseResultsRequest], test_case.ListTestCaseResultsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTestCaseResults(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_test_cases( + self, + ) -> Callable[[test_case.ListTestCasesRequest], test_case.ListTestCasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTestCases(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_test_case( + self, + ) -> Callable[[test_case.RunTestCaseRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunTestCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_test_case( + self, + ) -> Callable[[gcdc_test_case.UpdateTestCaseRequest], gcdc_test_case.TestCase]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTestCase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(TestCasesRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(TestCasesRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(TestCasesRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TestCasesRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(TestCasesRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TestCasesRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/__init__.py new file mode 100644 index 000000000000..bed759a5a677 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import TransitionRouteGroupsAsyncClient +from .client import TransitionRouteGroupsClient + +__all__ = ( + "TransitionRouteGroupsClient", + "TransitionRouteGroupsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py new file mode 100644 index 000000000000..6bcefbb3a380 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py @@ -0,0 +1,1117 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.transition_route_groups import pagers +from google.cloud.dialogflowcx_v3.types import ( + transition_route_group as gcdc_transition_route_group, +) +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import transition_route_group + +from .client import TransitionRouteGroupsClient +from .transports.base import DEFAULT_CLIENT_INFO, TransitionRouteGroupsTransport +from .transports.grpc_asyncio import TransitionRouteGroupsGrpcAsyncIOTransport + + +class TransitionRouteGroupsAsyncClient: + """Service for managing + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. 
+ """ + + _client: TransitionRouteGroupsClient + + DEFAULT_ENDPOINT = TransitionRouteGroupsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TransitionRouteGroupsClient.DEFAULT_MTLS_ENDPOINT + + flow_path = staticmethod(TransitionRouteGroupsClient.flow_path) + parse_flow_path = staticmethod(TransitionRouteGroupsClient.parse_flow_path) + intent_path = staticmethod(TransitionRouteGroupsClient.intent_path) + parse_intent_path = staticmethod(TransitionRouteGroupsClient.parse_intent_path) + page_path = staticmethod(TransitionRouteGroupsClient.page_path) + parse_page_path = staticmethod(TransitionRouteGroupsClient.parse_page_path) + transition_route_group_path = staticmethod( + TransitionRouteGroupsClient.transition_route_group_path + ) + parse_transition_route_group_path = staticmethod( + TransitionRouteGroupsClient.parse_transition_route_group_path + ) + webhook_path = staticmethod(TransitionRouteGroupsClient.webhook_path) + parse_webhook_path = staticmethod(TransitionRouteGroupsClient.parse_webhook_path) + common_billing_account_path = staticmethod( + TransitionRouteGroupsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TransitionRouteGroupsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TransitionRouteGroupsClient.common_folder_path) + parse_common_folder_path = staticmethod( + TransitionRouteGroupsClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + TransitionRouteGroupsClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + TransitionRouteGroupsClient.parse_common_organization_path + ) + common_project_path = staticmethod(TransitionRouteGroupsClient.common_project_path) + parse_common_project_path = staticmethod( + TransitionRouteGroupsClient.parse_common_project_path + ) + common_location_path = staticmethod( + TransitionRouteGroupsClient.common_location_path + ) + parse_common_location_path = staticmethod( + 
TransitionRouteGroupsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TransitionRouteGroupsAsyncClient: The constructed client. + """ + return TransitionRouteGroupsClient.from_service_account_info.__func__(TransitionRouteGroupsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TransitionRouteGroupsAsyncClient: The constructed client. + """ + return TransitionRouteGroupsClient.from_service_account_file.__func__(TransitionRouteGroupsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TransitionRouteGroupsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TransitionRouteGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + TransitionRouteGroupsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TransitionRouteGroupsClient).get_transport_class, + type(TransitionRouteGroupsClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TransitionRouteGroupsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transition route groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, ~.TransitionRouteGroupsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TransitionRouteGroupsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_transition_route_groups( + self, + request: Optional[ + Union[transition_route_group.ListTransitionRouteGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransitionRouteGroupsAsyncPager: + r"""Returns the list of all transition route groups in + the specified flow. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_transition_route_groups(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListTransitionRouteGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transition_route_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsRequest, dict]]): + The request object. The request message for + [TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups]. + parent (:class:`str`): + Required. The flow to list all transition route groups + for. Format: + ``projects//locations//agents//flows/`` + or \`projects//locations//agents/. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.services.transition_route_groups.pagers.ListTransitionRouteGroupsAsyncPager: + The response message for + [TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = transition_route_group.ListTransitionRouteGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transition_route_groups, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTransitionRouteGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_transition_route_group( + self, + request: Optional[ + Union[transition_route_group.GetTransitionRouteGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transition_route_group.TransitionRouteGroup: + r"""Retrieves the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetTransitionRouteGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transition_route_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetTransitionRouteGroupRequest, dict]]): + The request object. The request message for + [TransitionRouteGroups.GetTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.GetTransitionRouteGroup]. + name (:class:`str`): + Required. The name of the + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + Format: + ``projects//locations//agents//flows//transitionRouteGroups/`` + or + ``projects//locations//agents//transitionRouteGroups/``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = transition_route_group.GetTransitionRouteGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transition_route_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_transition_route_group( + self, + request: Optional[ + Union[gcdc_transition_route_group.CreateTransitionRouteGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + transition_route_group: Optional[ + gcdc_transition_route_group.TransitionRouteGroup + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_transition_route_group.TransitionRouteGroup: + r"""Creates an + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient() + + # Initialize request argument(s) + transition_route_group = dialogflowcx_v3.TransitionRouteGroup() + transition_route_group.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateTransitionRouteGroupRequest( + parent="parent_value", + transition_route_group=transition_route_group, + ) + + # Make the request + response = await client.create_transition_route_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateTransitionRouteGroupRequest, dict]]): + The request object. 
The request message for + [TransitionRouteGroups.CreateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.CreateTransitionRouteGroup]. + parent (:class:`str`): + Required. The flow to create an + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + for. Format: + ``projects//locations//agents//flows/`` + or + ``projects//locations//agents/`` + for agent-level groups. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transition_route_group (:class:`google.cloud.dialogflowcx_v3.types.TransitionRouteGroup`): + Required. The transition route group + to create. + + This corresponds to the ``transition_route_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, transition_route_group]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_transition_route_group.CreateTransitionRouteGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if transition_route_group is not None: + request.transition_route_group = transition_route_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_transition_route_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_transition_route_group( + self, + request: Optional[ + Union[gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, dict] + ] = None, + *, + transition_route_group: Optional[ + gcdc_transition_route_group.TransitionRouteGroup + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_transition_route_group.TransitionRouteGroup: + r"""Updates the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient() + + # Initialize request argument(s) + transition_route_group = dialogflowcx_v3.TransitionRouteGroup() + transition_route_group.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateTransitionRouteGroupRequest( + transition_route_group=transition_route_group, + ) + + # Make the request + response = await client.update_transition_route_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateTransitionRouteGroupRequest, dict]]): + The request object. The request message for + [TransitionRouteGroups.UpdateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.UpdateTransitionRouteGroup]. + transition_route_group (:class:`google.cloud.dialogflowcx_v3.types.TransitionRouteGroup`): + Required. The transition route group + to update. + + This corresponds to the ``transition_route_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([transition_route_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_transition_route_group.UpdateTransitionRouteGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if transition_route_group is not None: + request.transition_route_group = transition_route_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_transition_route_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("transition_route_group.name", request.transition_route_group.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_transition_route_group( + self, + request: Optional[ + Union[transition_route_group.DeleteTransitionRouteGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteTransitionRouteGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_transition_route_group(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteTransitionRouteGroupRequest, dict]]): + The request object. The request message for + [TransitionRouteGroups.DeleteTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.DeleteTransitionRouteGroup]. + name (:class:`str`): + Required. The name of the + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + to delete. Format: + ``projects//locations//agents//flows//transitionRouteGroups/`` + or + ``projects//locations//agents//transitionRouteGroups/``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = transition_route_group.DeleteTransitionRouteGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_transition_route_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "TransitionRouteGroupsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TransitionRouteGroupsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py new file mode 100644 index 000000000000..e104636ce4d7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py @@ -0,0 +1,1464 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.transition_route_groups import pagers +from google.cloud.dialogflowcx_v3.types import ( + transition_route_group as gcdc_transition_route_group, +) +from google.cloud.dialogflowcx_v3.types import page +from google.cloud.dialogflowcx_v3.types import transition_route_group + +from .transports.base import DEFAULT_CLIENT_INFO, TransitionRouteGroupsTransport +from .transports.grpc import TransitionRouteGroupsGrpcTransport +from .transports.grpc_asyncio import TransitionRouteGroupsGrpcAsyncIOTransport +from .transports.rest import TransitionRouteGroupsRestTransport + + +class TransitionRouteGroupsClientMeta(type): + """Metaclass for the TransitionRouteGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[TransitionRouteGroupsTransport]] + _transport_registry["grpc"] = TransitionRouteGroupsGrpcTransport + _transport_registry["grpc_asyncio"] = TransitionRouteGroupsGrpcAsyncIOTransport + _transport_registry["rest"] = TransitionRouteGroupsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TransitionRouteGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TransitionRouteGroupsClient(metaclass=TransitionRouteGroupsClientMeta): + """Service for managing + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TransitionRouteGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TransitionRouteGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TransitionRouteGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + TransitionRouteGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def flow_path( + project: str, + location: str, + agent: str, + flow: str, + ) -> str: + """Returns a fully-qualified flow string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}".format( + project=project, + location=location, + agent=agent, + flow=flow, + ) + + @staticmethod + def parse_flow_path(path: str) -> Dict[str, str]: + """Parses a flow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def intent_path( + project: str, + location: str, + agent: str, + intent: str, + ) -> str: + """Returns a fully-qualified intent string.""" + return "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format( + project=project, + location=location, + agent=agent, + intent=intent, + ) + + @staticmethod + def parse_intent_path(path: str) -> Dict[str, str]: + """Parses a intent path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/intents/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def page_path( + project: str, + location: str, + agent: str, + flow: str, + page: str, + ) -> str: + """Returns a fully-qualified page string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/pages/{page}".format( + project=project, + location=location, + agent=agent, + flow=flow, + page=page, + ) + + @staticmethod + def parse_page_path(path: str) -> Dict[str, str]: + """Parses a page path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/pages/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transition_route_group_path( + project: str, + location: str, + agent: str, + flow: str, + transition_route_group: str, + ) -> str: + """Returns a 
fully-qualified transition_route_group string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/transitionRouteGroups/{transition_route_group}".format( + project=project, + location=location, + agent=agent, + flow=flow, + transition_route_group=transition_route_group, + ) + + @staticmethod + def parse_transition_route_group_path(path: str) -> Dict[str, str]: + """Parses a transition_route_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/transitionRouteGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path 
into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TransitionRouteGroupsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transition route groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TransitionRouteGroupsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ if isinstance(transport, TransitionRouteGroupsTransport): + # transport is a TransitionRouteGroupsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_transition_route_groups( + self, + request: Optional[ + Union[transition_route_group.ListTransitionRouteGroupsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransitionRouteGroupsPager: + r"""Returns the list of all transition route groups in + the specified flow. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_transition_route_groups(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListTransitionRouteGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transition_route_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsRequest, dict]): + The request object. The request message for + [TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups]. + parent (str): + Required. The flow to list all transition route groups + for. Format: + ``projects//locations//agents//flows/`` + or \`projects//locations//agents/. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.transition_route_groups.pagers.ListTransitionRouteGroupsPager: + The response message for + [TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a transition_route_group.ListTransitionRouteGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, transition_route_group.ListTransitionRouteGroupsRequest + ): + request = transition_route_group.ListTransitionRouteGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_transition_route_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransitionRouteGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_transition_route_group( + self, + request: Optional[ + Union[transition_route_group.GetTransitionRouteGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transition_route_group.TransitionRouteGroup: + r"""Retrieves the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetTransitionRouteGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_transition_route_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetTransitionRouteGroupRequest, dict]): + The request object. The request message for + [TransitionRouteGroups.GetTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.GetTransitionRouteGroup]. + name (str): + Required. The name of the + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + Format: + ``projects//locations//agents//flows//transitionRouteGroups/`` + or + ``projects//locations//agents//transitionRouteGroups/``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a transition_route_group.GetTransitionRouteGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, transition_route_group.GetTransitionRouteGroupRequest + ): + request = transition_route_group.GetTransitionRouteGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_transition_route_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_transition_route_group( + self, + request: Optional[ + Union[gcdc_transition_route_group.CreateTransitionRouteGroupRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + transition_route_group: Optional[ + gcdc_transition_route_group.TransitionRouteGroup + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_transition_route_group.TransitionRouteGroup: + r"""Creates an + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsClient() + + # Initialize request argument(s) + transition_route_group = dialogflowcx_v3.TransitionRouteGroup() + transition_route_group.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateTransitionRouteGroupRequest( + parent="parent_value", + transition_route_group=transition_route_group, + ) + + # Make the request + response = client.create_transition_route_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateTransitionRouteGroupRequest, dict]): + The request object. The request message for + [TransitionRouteGroups.CreateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.CreateTransitionRouteGroup]. + parent (str): + Required. The flow to create an + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + for. Format: + ``projects//locations//agents//flows/`` + or + ``projects//locations//agents/`` + for agent-level groups. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transition_route_group (google.cloud.dialogflowcx_v3.types.TransitionRouteGroup): + Required. The transition route group + to create. + + This corresponds to the ``transition_route_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, transition_route_group]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_transition_route_group.CreateTransitionRouteGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, gcdc_transition_route_group.CreateTransitionRouteGroupRequest + ): + request = gcdc_transition_route_group.CreateTransitionRouteGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if transition_route_group is not None: + request.transition_route_group = transition_route_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_transition_route_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_transition_route_group( + self, + request: Optional[ + Union[gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, dict] + ] = None, + *, + transition_route_group: Optional[ + gcdc_transition_route_group.TransitionRouteGroup + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_transition_route_group.TransitionRouteGroup: + r"""Updates the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsClient() + + # Initialize request argument(s) + transition_route_group = dialogflowcx_v3.TransitionRouteGroup() + transition_route_group.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateTransitionRouteGroupRequest( + transition_route_group=transition_route_group, + ) + + # Make the request + response = client.update_transition_route_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateTransitionRouteGroupRequest, dict]): + The request object. 
The request message for + [TransitionRouteGroups.UpdateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.UpdateTransitionRouteGroup]. + transition_route_group (google.cloud.dialogflowcx_v3.types.TransitionRouteGroup): + Required. The transition route group + to update. + + This corresponds to the ``transition_route_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([transition_route_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_transition_route_group.UpdateTransitionRouteGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, gcdc_transition_route_group.UpdateTransitionRouteGroupRequest + ): + request = gcdc_transition_route_group.UpdateTransitionRouteGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if transition_route_group is not None: + request.transition_route_group = transition_route_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_transition_route_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("transition_route_group.name", request.transition_route_group.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_transition_route_group( + self, + request: Optional[ + Union[transition_route_group.DeleteTransitionRouteGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_transition_route_group(): + # Create a client + client = dialogflowcx_v3.TransitionRouteGroupsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteTransitionRouteGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_transition_route_group(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteTransitionRouteGroupRequest, dict]): + The request object. The request message for + [TransitionRouteGroups.DeleteTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.DeleteTransitionRouteGroup]. + name (str): + Required. The name of the + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + to delete. Format: + ``projects//locations//agents//flows//transitionRouteGroups/`` + or + ``projects//locations//agents//transitionRouteGroups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a transition_route_group.DeleteTransitionRouteGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, transition_route_group.DeleteTransitionRouteGroupRequest + ): + request = transition_route_group.DeleteTransitionRouteGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_transition_route_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "TransitionRouteGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TransitionRouteGroupsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/pagers.py new file mode 100644 index 000000000000..9cb022ae5467 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/pagers.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import transition_route_group + + +class ListTransitionRouteGroupsPager: + """A pager for iterating through ``list_transition_route_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transition_route_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTransitionRouteGroups`` requests and continue to iterate + through the ``transition_route_groups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., transition_route_group.ListTransitionRouteGroupsResponse], + request: transition_route_group.ListTransitionRouteGroupsRequest, + response: transition_route_group.ListTransitionRouteGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = transition_route_group.ListTransitionRouteGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[transition_route_group.ListTransitionRouteGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[transition_route_group.TransitionRouteGroup]: + for page in self.pages: + yield from page.transition_route_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransitionRouteGroupsAsyncPager: + """A pager for iterating through ``list_transition_route_groups`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transition_route_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransitionRouteGroups`` requests and continue to iterate + through the ``transition_route_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[transition_route_group.ListTransitionRouteGroupsResponse] + ], + request: transition_route_group.ListTransitionRouteGroupsRequest, + response: transition_route_group.ListTransitionRouteGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = transition_route_group.ListTransitionRouteGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[transition_route_group.ListTransitionRouteGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[transition_route_group.TransitionRouteGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.transition_route_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/__init__.py new file mode 100644 index 000000000000..f61d21babfb2 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TransitionRouteGroupsTransport +from .grpc import TransitionRouteGroupsGrpcTransport +from .grpc_asyncio import TransitionRouteGroupsGrpcAsyncIOTransport +from .rest import ( + TransitionRouteGroupsRestInterceptor, + TransitionRouteGroupsRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TransitionRouteGroupsTransport]] +_transport_registry["grpc"] = TransitionRouteGroupsGrpcTransport +_transport_registry["grpc_asyncio"] = TransitionRouteGroupsGrpcAsyncIOTransport +_transport_registry["rest"] = TransitionRouteGroupsRestTransport + +__all__ = ( + "TransitionRouteGroupsTransport", + "TransitionRouteGroupsGrpcTransport", + "TransitionRouteGroupsGrpcAsyncIOTransport", + "TransitionRouteGroupsRestTransport", + "TransitionRouteGroupsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/base.py new file mode 100644 index 000000000000..9ad673043149 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/base.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import ( + transition_route_group as gcdc_transition_route_group, +) +from google.cloud.dialogflowcx_v3.types import transition_route_group + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TransitionRouteGroupsTransport(abc.ABC): + """Abstract transport class for TransitionRouteGroups.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_transition_route_groups: gapic_v1.method.wrap_method( + self.list_transition_route_groups, + default_timeout=None, + client_info=client_info, + ), + self.get_transition_route_group: gapic_v1.method.wrap_method( + self.get_transition_route_group, + default_timeout=None, + client_info=client_info, + ), + self.create_transition_route_group: gapic_v1.method.wrap_method( + self.create_transition_route_group, + default_timeout=None, + client_info=client_info, + ), + self.update_transition_route_group: gapic_v1.method.wrap_method( + self.update_transition_route_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_transition_route_group: gapic_v1.method.wrap_method( + self.delete_transition_route_group, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_transition_route_groups( + self, + ) -> Callable[ + [transition_route_group.ListTransitionRouteGroupsRequest], + Union[ + transition_route_group.ListTransitionRouteGroupsResponse, + Awaitable[transition_route_group.ListTransitionRouteGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.GetTransitionRouteGroupRequest], + Union[ + transition_route_group.TransitionRouteGroup, + Awaitable[transition_route_group.TransitionRouteGroup], + ], + ]: + raise NotImplementedError() + + @property + def create_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.CreateTransitionRouteGroupRequest], + Union[ + gcdc_transition_route_group.TransitionRouteGroup, + Awaitable[gcdc_transition_route_group.TransitionRouteGroup], + ], + ]: + raise NotImplementedError() + + @property + def update_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.UpdateTransitionRouteGroupRequest], + Union[ + gcdc_transition_route_group.TransitionRouteGroup, + Awaitable[gcdc_transition_route_group.TransitionRouteGroup], + ], + ]: + raise NotImplementedError() + + @property + def delete_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.DeleteTransitionRouteGroupRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> 
Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TransitionRouteGroupsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/grpc.py new file mode 100644 index 000000000000..fedeb931cdf5 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/grpc.py @@ -0,0 +1,503 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + transition_route_group as gcdc_transition_route_group, +) +from google.cloud.dialogflowcx_v3.types import transition_route_group + +from .base import DEFAULT_CLIENT_INFO, TransitionRouteGroupsTransport + + +class TransitionRouteGroupsGrpcTransport(TransitionRouteGroupsTransport): + """gRPC backend transport for TransitionRouteGroups. + + Service for managing + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_transition_route_groups( + self, + ) -> Callable[ + [transition_route_group.ListTransitionRouteGroupsRequest], + transition_route_group.ListTransitionRouteGroupsResponse, + ]: + r"""Return a callable for the list transition route groups method over gRPC. + + Returns the list of all transition route groups in + the specified flow. + + Returns: + Callable[[~.ListTransitionRouteGroupsRequest], + ~.ListTransitionRouteGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_transition_route_groups" not in self._stubs: + self._stubs["list_transition_route_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/ListTransitionRouteGroups", + request_serializer=transition_route_group.ListTransitionRouteGroupsRequest.serialize, + response_deserializer=transition_route_group.ListTransitionRouteGroupsResponse.deserialize, + ) + return self._stubs["list_transition_route_groups"] + + @property + def get_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.GetTransitionRouteGroupRequest], + transition_route_group.TransitionRouteGroup, + ]: + r"""Return a callable for the get transition route group method over gRPC. + + Retrieves the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Returns: + Callable[[~.GetTransitionRouteGroupRequest], + ~.TransitionRouteGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transition_route_group" not in self._stubs: + self._stubs["get_transition_route_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/GetTransitionRouteGroup", + request_serializer=transition_route_group.GetTransitionRouteGroupRequest.serialize, + response_deserializer=transition_route_group.TransitionRouteGroup.deserialize, + ) + return self._stubs["get_transition_route_group"] + + @property + def create_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.CreateTransitionRouteGroupRequest], + gcdc_transition_route_group.TransitionRouteGroup, + ]: + r"""Return a callable for the create transition route group method over gRPC. 
+ + Creates an + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateTransitionRouteGroupRequest], + ~.TransitionRouteGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_transition_route_group" not in self._stubs: + self._stubs[ + "create_transition_route_group" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/CreateTransitionRouteGroup", + request_serializer=gcdc_transition_route_group.CreateTransitionRouteGroupRequest.serialize, + response_deserializer=gcdc_transition_route_group.TransitionRouteGroup.deserialize, + ) + return self._stubs["create_transition_route_group"] + + @property + def update_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.UpdateTransitionRouteGroupRequest], + gcdc_transition_route_group.TransitionRouteGroup, + ]: + r"""Return a callable for the update transition route group method over gRPC. + + Updates the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateTransitionRouteGroupRequest], + ~.TransitionRouteGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_transition_route_group" not in self._stubs: + self._stubs[ + "update_transition_route_group" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/UpdateTransitionRouteGroup", + request_serializer=gcdc_transition_route_group.UpdateTransitionRouteGroupRequest.serialize, + response_deserializer=gcdc_transition_route_group.TransitionRouteGroup.deserialize, + ) + return self._stubs["update_transition_route_group"] + + @property + def delete_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.DeleteTransitionRouteGroupRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete transition route group method over gRPC. + + Deletes the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeleteTransitionRouteGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_transition_route_group" not in self._stubs: + self._stubs[ + "delete_transition_route_group" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/DeleteTransitionRouteGroup", + request_serializer=transition_route_group.DeleteTransitionRouteGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_transition_route_group"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TransitionRouteGroupsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/grpc_asyncio.py new file mode 100644 index 000000000000..0f10911b2bd9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/grpc_asyncio.py @@ -0,0 +1,503 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + transition_route_group as gcdc_transition_route_group, +) +from google.cloud.dialogflowcx_v3.types import transition_route_group + +from .base import DEFAULT_CLIENT_INFO, TransitionRouteGroupsTransport +from .grpc import TransitionRouteGroupsGrpcTransport + + +class TransitionRouteGroupsGrpcAsyncIOTransport(TransitionRouteGroupsTransport): + """gRPC AsyncIO backend transport for TransitionRouteGroups. + + Service for managing + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def list_transition_route_groups( + self, + ) -> Callable[ + [transition_route_group.ListTransitionRouteGroupsRequest], + Awaitable[transition_route_group.ListTransitionRouteGroupsResponse], + ]: + r"""Return a callable for the list transition route groups method over gRPC. + + Returns the list of all transition route groups in + the specified flow. + + Returns: + Callable[[~.ListTransitionRouteGroupsRequest], + Awaitable[~.ListTransitionRouteGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transition_route_groups" not in self._stubs: + self._stubs["list_transition_route_groups"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/ListTransitionRouteGroups", + request_serializer=transition_route_group.ListTransitionRouteGroupsRequest.serialize, + response_deserializer=transition_route_group.ListTransitionRouteGroupsResponse.deserialize, + ) + return self._stubs["list_transition_route_groups"] + + @property + def get_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.GetTransitionRouteGroupRequest], + Awaitable[transition_route_group.TransitionRouteGroup], + ]: + r"""Return a callable for the get transition route group method over gRPC. + + Retrieves the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Returns: + Callable[[~.GetTransitionRouteGroupRequest], + Awaitable[~.TransitionRouteGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_transition_route_group" not in self._stubs: + self._stubs["get_transition_route_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/GetTransitionRouteGroup", + request_serializer=transition_route_group.GetTransitionRouteGroupRequest.serialize, + response_deserializer=transition_route_group.TransitionRouteGroup.deserialize, + ) + return self._stubs["get_transition_route_group"] + + @property + def create_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.CreateTransitionRouteGroupRequest], + Awaitable[gcdc_transition_route_group.TransitionRouteGroup], + ]: + r"""Return a callable for the create transition route group method over gRPC. + + Creates an + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + in the specified flow. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.CreateTransitionRouteGroupRequest], + Awaitable[~.TransitionRouteGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_transition_route_group" not in self._stubs: + self._stubs[ + "create_transition_route_group" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/CreateTransitionRouteGroup", + request_serializer=gcdc_transition_route_group.CreateTransitionRouteGroupRequest.serialize, + response_deserializer=gcdc_transition_route_group.TransitionRouteGroup.deserialize, + ) + return self._stubs["create_transition_route_group"] + + @property + def update_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.UpdateTransitionRouteGroupRequest], + Awaitable[gcdc_transition_route_group.TransitionRouteGroup], + ]: + r"""Return a callable for the update transition route group method over gRPC. + + Updates the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.UpdateTransitionRouteGroupRequest], + Awaitable[~.TransitionRouteGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_transition_route_group" not in self._stubs: + self._stubs[ + "update_transition_route_group" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/UpdateTransitionRouteGroup", + request_serializer=gcdc_transition_route_group.UpdateTransitionRouteGroupRequest.serialize, + response_deserializer=gcdc_transition_route_group.TransitionRouteGroup.deserialize, + ) + return self._stubs["update_transition_route_group"] + + @property + def delete_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.DeleteTransitionRouteGroupRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete transition route group method over gRPC. + + Deletes the specified + [TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + Note: You should always train a flow prior to sending it + queries. See the `training + documentation `__. + + Returns: + Callable[[~.DeleteTransitionRouteGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_transition_route_group" not in self._stubs: + self._stubs[ + "delete_transition_route_group" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.TransitionRouteGroups/DeleteTransitionRouteGroup", + request_serializer=transition_route_group.DeleteTransitionRouteGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_transition_route_group"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("TransitionRouteGroupsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py new file mode 100644 index 000000000000..b72b8986b174 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py @@ -0,0 +1,1355 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + transition_route_group as gcdc_transition_route_group, +) +from google.cloud.dialogflowcx_v3.types import transition_route_group + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TransitionRouteGroupsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TransitionRouteGroupsRestInterceptor: + 
"""Interceptor for TransitionRouteGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TransitionRouteGroupsRestTransport. + + .. code-block:: python + class MyCustomTransitionRouteGroupsInterceptor(TransitionRouteGroupsRestInterceptor): + def pre_create_transition_route_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_transition_route_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_transition_route_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_transition_route_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transition_route_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transition_route_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transition_route_groups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_transition_route_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_transition_route_group(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TransitionRouteGroupsRestTransport(interceptor=MyCustomTransitionRouteGroupsInterceptor()) + client = TransitionRouteGroupsClient(transport=transport) + + + """ + + def pre_create_transition_route_group( + self, + 
request: gcdc_transition_route_group.CreateTransitionRouteGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcdc_transition_route_group.CreateTransitionRouteGroupRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_transition_route_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def post_create_transition_route_group( + self, response: gcdc_transition_route_group.TransitionRouteGroup + ) -> gcdc_transition_route_group.TransitionRouteGroup: + """Post-rpc interceptor for create_transition_route_group + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_delete_transition_route_group( + self, + request: transition_route_group.DeleteTransitionRouteGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + transition_route_group.DeleteTransitionRouteGroupRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_transition_route_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def pre_get_transition_route_group( + self, + request: transition_route_group.GetTransitionRouteGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + transition_route_group.GetTransitionRouteGroupRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_transition_route_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. 
+ """ + return request, metadata + + def post_get_transition_route_group( + self, response: transition_route_group.TransitionRouteGroup + ) -> transition_route_group.TransitionRouteGroup: + """Post-rpc interceptor for get_transition_route_group + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_list_transition_route_groups( + self, + request: transition_route_group.ListTransitionRouteGroupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + transition_route_group.ListTransitionRouteGroupsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_transition_route_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def post_list_transition_route_groups( + self, response: transition_route_group.ListTransitionRouteGroupsResponse + ) -> transition_route_group.ListTransitionRouteGroupsResponse: + """Post-rpc interceptor for list_transition_route_groups + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_update_transition_route_group( + self, + request: gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_transition_route_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. 
+ """ + return request, metadata + + def post_update_transition_route_group( + self, response: gcdc_transition_route_group.TransitionRouteGroup + ) -> gcdc_transition_route_group.TransitionRouteGroup: + """Post-rpc interceptor for update_transition_route_group + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TransitionRouteGroups server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TransitionRouteGroups server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TransitionRouteGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TransitionRouteGroupsRestInterceptor + + +class TransitionRouteGroupsRestTransport(TransitionRouteGroupsTransport): + """REST backend transport for TransitionRouteGroups. + + Service for managing + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TransitionRouteGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TransitionRouteGroupsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateTransitionRouteGroup(TransitionRouteGroupsRestStub):
+        def __hash__(self):
+            return hash("CreateTransitionRouteGroup")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: gcdc_transition_route_group.CreateTransitionRouteGroupRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcdc_transition_route_group.TransitionRouteGroup:
+            r"""Call the create transition route
+        group method over HTTP.
+
+            Args:
+                request (~.gcdc_transition_route_group.CreateTransitionRouteGroupRequest):
+                    The request object. The request message for
+                [TransitionRouteGroups.CreateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.CreateTransitionRouteGroup].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_transition_route_group.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [``TransitionRoutes``][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/flows/*}/transitionRouteGroups", + "body": "transition_route_group", + }, + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/transitionRouteGroups", + "body": "transition_route_group", + }, + ] + request, metadata = self._interceptor.pre_create_transition_route_group( + request, metadata + ) + pb_request = ( + gcdc_transition_route_group.CreateTransitionRouteGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_transition_route_group.TransitionRouteGroup() + pb_resp = gcdc_transition_route_group.TransitionRouteGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_transition_route_group(resp) + return resp + + class _DeleteTransitionRouteGroup(TransitionRouteGroupsRestStub): + def __hash__(self): + return hash("DeleteTransitionRouteGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: transition_route_group.DeleteTransitionRouteGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete transition route + group method over HTTP. + + Args: + request (~.transition_route_group.DeleteTransitionRouteGroupRequest): + The request object. The request message for + [TransitionRouteGroups.DeleteTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.DeleteTransitionRouteGroup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/transitionRouteGroups/*}", + }, + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/transitionRouteGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_transition_route_group( + request, metadata + ) + pb_request = transition_route_group.DeleteTransitionRouteGroupRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetTransitionRouteGroup(TransitionRouteGroupsRestStub): + def __hash__(self): + return hash("GetTransitionRouteGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: transition_route_group.GetTransitionRouteGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transition_route_group.TransitionRouteGroup: + r"""Call the get transition route + group method over HTTP. + + Args: + request (~.transition_route_group.GetTransitionRouteGroupRequest): + The request object. The request message for + [TransitionRouteGroups.GetTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.GetTransitionRouteGroup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transition_route_group.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [``TransitionRoutes``][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/transitionRouteGroups/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/transitionRouteGroups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transition_route_group( + request, metadata + ) + pb_request = transition_route_group.GetTransitionRouteGroupRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transition_route_group.TransitionRouteGroup() + pb_resp = transition_route_group.TransitionRouteGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transition_route_group(resp) + return resp + + class _ListTransitionRouteGroups(TransitionRouteGroupsRestStub): + def __hash__(self): + return hash("ListTransitionRouteGroups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: transition_route_group.ListTransitionRouteGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transition_route_group.ListTransitionRouteGroupsResponse: + r"""Call the list transition route + groups method over HTTP. + + Args: + request (~.transition_route_group.ListTransitionRouteGroupsRequest): + The request object. The request message for + [TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transition_route_group.ListTransitionRouteGroupsResponse: + The response message for + [TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/flows/*}/transitionRouteGroups", + }, + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/transitionRouteGroups", + }, + ] + request, metadata = self._interceptor.pre_list_transition_route_groups( + request, metadata + ) + pb_request = transition_route_group.ListTransitionRouteGroupsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transition_route_group.ListTransitionRouteGroupsResponse() + pb_resp = transition_route_group.ListTransitionRouteGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transition_route_groups(resp) + return resp + + class _UpdateTransitionRouteGroup(TransitionRouteGroupsRestStub): + def __hash__(self): + return hash("UpdateTransitionRouteGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_transition_route_group.TransitionRouteGroup: + r"""Call the update transition route + group method over HTTP. + + Args: + request (~.gcdc_transition_route_group.UpdateTransitionRouteGroupRequest): + The request object. The request message for + [TransitionRouteGroups.UpdateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.UpdateTransitionRouteGroup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_transition_route_group.TransitionRouteGroup: + A TransitionRouteGroup represents a group of + [``TransitionRoutes``][google.cloud.dialogflow.cx.v3.TransitionRoute] + to be used by a + [Page][google.cloud.dialogflow.cx.v3.Page]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{transition_route_group.name=projects/*/locations/*/agents/*/flows/*/transitionRouteGroups/*}", + "body": "transition_route_group", + }, + { + "method": "patch", + "uri": "/v3/{transition_route_group.name=projects/*/locations/*/agents/*/transitionRouteGroups/*}", + "body": "transition_route_group", + }, + ] + request, metadata = self._interceptor.pre_update_transition_route_group( + request, metadata + ) + pb_request = ( + gcdc_transition_route_group.UpdateTransitionRouteGroupRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_transition_route_group.TransitionRouteGroup() + pb_resp = gcdc_transition_route_group.TransitionRouteGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_transition_route_group(resp) + return resp + + @property + def create_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.CreateTransitionRouteGroupRequest], + gcdc_transition_route_group.TransitionRouteGroup, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTransitionRouteGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.DeleteTransitionRouteGroupRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTransitionRouteGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transition_route_group( + self, + ) -> Callable[ + [transition_route_group.GetTransitionRouteGroupRequest], + transition_route_group.TransitionRouteGroup, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTransitionRouteGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transition_route_groups( + self, + ) -> Callable[ + [transition_route_group.ListTransitionRouteGroupsRequest], + transition_route_group.ListTransitionRouteGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListTransitionRouteGroups(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_transition_route_group( + self, + ) -> Callable[ + [gcdc_transition_route_group.UpdateTransitionRouteGroupRequest], + gcdc_transition_route_group.TransitionRouteGroup, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTransitionRouteGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(TransitionRouteGroupsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(TransitionRouteGroupsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(TransitionRouteGroupsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(TransitionRouteGroupsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(TransitionRouteGroupsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TransitionRouteGroupsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/__init__.py new file mode 100644 index 000000000000..107272199acc --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import VersionsAsyncClient +from .client import VersionsClient + +__all__ = ( + "VersionsClient", + "VersionsAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py new file mode 100644 index 000000000000..0c07daab1ffa --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/async_client.py @@ -0,0 +1,1321 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.versions import pagers +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import version +from google.cloud.dialogflowcx_v3.types import version as gcdc_version + +from .client import VersionsClient +from .transports.base import DEFAULT_CLIENT_INFO, VersionsTransport +from .transports.grpc_asyncio import VersionsGrpcAsyncIOTransport + + +class VersionsAsyncClient: + """Service for managing + [Versions][google.cloud.dialogflow.cx.v3.Version]. 
+ """ + + _client: VersionsClient + + DEFAULT_ENDPOINT = VersionsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = VersionsClient.DEFAULT_MTLS_ENDPOINT + + version_path = staticmethod(VersionsClient.version_path) + parse_version_path = staticmethod(VersionsClient.parse_version_path) + common_billing_account_path = staticmethod( + VersionsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + VersionsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(VersionsClient.common_folder_path) + parse_common_folder_path = staticmethod(VersionsClient.parse_common_folder_path) + common_organization_path = staticmethod(VersionsClient.common_organization_path) + parse_common_organization_path = staticmethod( + VersionsClient.parse_common_organization_path + ) + common_project_path = staticmethod(VersionsClient.common_project_path) + parse_common_project_path = staticmethod(VersionsClient.parse_common_project_path) + common_location_path = staticmethod(VersionsClient.common_location_path) + parse_common_location_path = staticmethod(VersionsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VersionsAsyncClient: The constructed client. + """ + return VersionsClient.from_service_account_info.__func__(VersionsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + VersionsAsyncClient: The constructed client. + """ + return VersionsClient.from_service_account_file.__func__(VersionsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return VersionsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> VersionsTransport: + """Returns the transport used by the client instance. 
+ + Returns: + VersionsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(VersionsClient).get_transport_class, type(VersionsClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, VersionsTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the versions client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.VersionsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = VersionsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_versions( + self, + request: Optional[Union[version.ListVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVersionsAsyncPager: + r"""Returns the list of all versions in the specified + [Flow][google.cloud.dialogflow.cx.v3.Flow]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_versions(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListVersionsRequest, dict]]): + The request object. The request message for + [Versions.ListVersions][google.cloud.dialogflow.cx.v3.Versions.ListVersions]. + parent (:class:`str`): + Required. The [Flow][google.cloud.dialogflow.cx.v3.Flow] + to list all versions for. Format: + ``projects//locations//agents//flows/``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.versions.pagers.ListVersionsAsyncPager: + The response message for + [Versions.ListVersions][google.cloud.dialogflow.cx.v3.Versions.ListVersions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = version.ListVersionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_versions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListVersionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_version( + self, + request: Optional[Union[version.GetVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.Version: + r"""Retrieves the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_version(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_version(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetVersionRequest, dict]]): + The request object. The request message for + [Versions.GetVersion][google.cloud.dialogflow.cx.v3.Versions.GetVersion]. + name (:class:`str`): + Required. The name of the + [Version][google.cloud.dialogflow.cx.v3.Version]. + Format: + ``projects//locations//agents//flows//versions/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Version: + Represents a version of a flow. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = version.GetVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_version, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_version( + self, + request: Optional[Union[gcdc_version.CreateVersionRequest, dict]] = None, + *, + parent: Optional[str] = None, + version: Optional[gcdc_version.Version] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Version][google.cloud.dialogflow.cx.v3.Version] in + the specified [Flow][google.cloud.dialogflow.cx.v3.Flow]. 
+ + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [CreateVersionOperationMetadata][google.cloud.dialogflow.cx.v3.CreateVersionOperationMetadata] + - ``response``: + [Version][google.cloud.dialogflow.cx.v3.Version] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_version(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + version = dialogflowcx_v3.Version() + version.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateVersionRequest( + parent="parent_value", + version=version, + ) + + # Make the request + operation = client.create_version(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateVersionRequest, dict]]): + The request object. The request message for + [Versions.CreateVersion][google.cloud.dialogflow.cx.v3.Versions.CreateVersion]. + parent (:class:`str`): + Required. The [Flow][google.cloud.dialogflow.cx.v3.Flow] + to create an + [Version][google.cloud.dialogflow.cx.v3.Version] for. + Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (:class:`google.cloud.dialogflowcx_v3.types.Version`): + Required. The version to create. 
+ This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dialogflowcx_v3.types.Version` + Represents a version of a flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_version.CreateVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if version is not None: + request.version = version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_version, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcdc_version.Version, + metadata_type=gcdc_version.CreateVersionOperationMetadata, + ) + + # Done; return the response. + return response + + async def update_version( + self, + request: Optional[Union[gcdc_version.UpdateVersionRequest, dict]] = None, + *, + version: Optional[gcdc_version.Version] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_version.Version: + r"""Updates the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_version(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + version = dialogflowcx_v3.Version() + version.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateVersionRequest( + version=version, + ) + + # Make the request + response = await client.update_version(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateVersionRequest, dict]]): + The request object. The request message for + [Versions.UpdateVersion][google.cloud.dialogflow.cx.v3.Versions.UpdateVersion]. + version (:class:`google.cloud.dialogflowcx_v3.types.Version`): + Required. The version to update. 
+ This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The mask to control which fields get updated. + Currently only ``description`` and ``display_name`` can + be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Version: + Represents a version of a flow. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([version, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_version.UpdateVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if version is not None: + request.version = version + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_version, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("version.name", request.version.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_version( + self, + request: Optional[Union[version.DeleteVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_version(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + await client.delete_version(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteVersionRequest, dict]]): + The request object. The request message for + [Versions.DeleteVersion][google.cloud.dialogflow.cx.v3.Versions.DeleteVersion]. + name (:class:`str`): + Required. The name of the + [Version][google.cloud.dialogflow.cx.v3.Version] to + delete. Format: + ``projects//locations//agents//flows//versions/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = version.DeleteVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_version, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def load_version( + self, + request: Optional[Union[version.LoadVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Loads resources in the specified version to the draft flow. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_load_version(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.LoadVersionRequest( + name="name_value", + ) + + # Make the request + operation = client.load_version(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.LoadVersionRequest, dict]]): + The request object. The request message for + [Versions.LoadVersion][google.cloud.dialogflow.cx.v3.Versions.LoadVersion]. + name (:class:`str`): + Required. The + [Version][google.cloud.dialogflow.cx.v3.Version] to be + loaded to draft flow. Format: + ``projects//locations//agents//flows//versions/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = version.LoadVersionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.load_version, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. + return response + + async def compare_versions( + self, + request: Optional[Union[version.CompareVersionsRequest, dict]] = None, + *, + base_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.CompareVersionsResponse: + r"""Compares the specified base version with target + version. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_compare_versions(): + # Create a client + client = dialogflowcx_v3.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.CompareVersionsRequest( + base_version="base_version_value", + target_version="target_version_value", + ) + + # Make the request + response = await client.compare_versions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CompareVersionsRequest, dict]]): + The request object. The request message for + [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions]. + base_version (:class:`str`): + Required. Name of the base flow version to compare with + the target version. Use version ID ``0`` to indicate the + draft version of the specified flow. + + Format: + ``projects//locations//agents/ /flows//versions/``. + + This corresponds to the ``base_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.CompareVersionsResponse: + The response message for + [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions]. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([base_version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = version.CompareVersionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if base_version is not None: + request.base_version = base_version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.compare_versions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("base_version", request.base_version),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "VersionsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VersionsAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py new file mode 100644 index 000000000000..57617919a687 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py @@ -0,0 +1,1567 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.versions import pagers +from google.cloud.dialogflowcx_v3.types import flow +from google.cloud.dialogflowcx_v3.types import version +from google.cloud.dialogflowcx_v3.types import version as gcdc_version + +from .transports.base import DEFAULT_CLIENT_INFO, VersionsTransport +from .transports.grpc import VersionsGrpcTransport +from .transports.grpc_asyncio import VersionsGrpcAsyncIOTransport +from .transports.rest import VersionsRestTransport + + +class VersionsClientMeta(type): + 
"""Metaclass for the Versions client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[VersionsTransport]] + _transport_registry["grpc"] = VersionsGrpcTransport + _transport_registry["grpc_asyncio"] = VersionsGrpcAsyncIOTransport + _transport_registry["rest"] = VersionsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[VersionsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class VersionsClient(metaclass=VersionsClientMeta): + """Service for managing + [Versions][google.cloud.dialogflow.cx.v3.Version]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VersionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VersionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VersionsTransport: + """Returns the transport used by the client instance. + + Returns: + VersionsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def version_path( + project: str, + location: str, + agent: str, + flow: str, + version: str, + ) -> str: + """Returns a fully-qualified version string.""" + return "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}/versions/{version}".format( + project=project, + location=location, + agent=agent, + flow=flow, + version=version, + ) + + @staticmethod + def parse_version_path(path: str) -> Dict[str, str]: + """Parses a version path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/flows/(?P.+?)/versions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() 
if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VersionsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the versions client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VersionsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VersionsTransport): + # transport is a VersionsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_versions( + self, + request: Optional[Union[version.ListVersionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVersionsPager: + r"""Returns the list of all versions in the specified + [Flow][google.cloud.dialogflow.cx.v3.Flow]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_versions(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListVersionsRequest, dict]): + The request object. The request message for + [Versions.ListVersions][google.cloud.dialogflow.cx.v3.Versions.ListVersions]. + parent (str): + Required. The [Flow][google.cloud.dialogflow.cx.v3.Flow] + to list all versions for. Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.versions.pagers.ListVersionsPager: + The response message for + [Versions.ListVersions][google.cloud.dialogflow.cx.v3.Versions.ListVersions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a version.ListVersionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, version.ListVersionsRequest): + request = version.ListVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVersionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_version( + self, + request: Optional[Union[version.GetVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.Version: + r"""Retrieves the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_version(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = client.get_version(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetVersionRequest, dict]): + The request object. The request message for + [Versions.GetVersion][google.cloud.dialogflow.cx.v3.Versions.GetVersion]. + name (str): + Required. The name of the + [Version][google.cloud.dialogflow.cx.v3.Version]. + Format: + ``projects//locations//agents//flows//versions/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Version: + Represents a version of a flow. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a version.GetVersionRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, version.GetVersionRequest): + request = version.GetVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_version( + self, + request: Optional[Union[gcdc_version.CreateVersionRequest, dict]] = None, + *, + parent: Optional[str] = None, + version: Optional[gcdc_version.Version] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Version][google.cloud.dialogflow.cx.v3.Version] in + the specified [Flow][google.cloud.dialogflow.cx.v3.Flow]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [CreateVersionOperationMetadata][google.cloud.dialogflow.cx.v3.CreateVersionOperationMetadata] + - ``response``: + [Version][google.cloud.dialogflow.cx.v3.Version] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_version(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + version = dialogflowcx_v3.Version() + version.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateVersionRequest( + parent="parent_value", + version=version, + ) + + # Make the request + operation = client.create_version(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateVersionRequest, dict]): + The request object. The request message for + [Versions.CreateVersion][google.cloud.dialogflow.cx.v3.Versions.CreateVersion]. + parent (str): + Required. The [Flow][google.cloud.dialogflow.cx.v3.Flow] + to create an + [Version][google.cloud.dialogflow.cx.v3.Version] for. + Format: + ``projects//locations//agents//flows/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + version (google.cloud.dialogflowcx_v3.types.Version): + Required. The version to create. + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.dialogflowcx_v3.types.Version` + Represents a version of a flow. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_version.CreateVersionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_version.CreateVersionRequest): + request = gcdc_version.CreateVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if version is not None: + request.version = version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcdc_version.Version, + metadata_type=gcdc_version.CreateVersionOperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_version( + self, + request: Optional[Union[gcdc_version.UpdateVersionRequest, dict]] = None, + *, + version: Optional[gcdc_version.Version] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_version.Version: + r"""Updates the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_version(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + version = dialogflowcx_v3.Version() + version.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateVersionRequest( + version=version, + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateVersionRequest, dict]): + The request object. The request message for + [Versions.UpdateVersion][google.cloud.dialogflow.cx.v3.Versions.UpdateVersion]. + version (google.cloud.dialogflowcx_v3.types.Version): + Required. The version to update. + This corresponds to the ``version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to control which fields get updated. 
+ Currently only ``description`` and ``display_name`` can + be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Version: + Represents a version of a flow. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([version, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_version.UpdateVersionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_version.UpdateVersionRequest): + request = gcdc_version.UpdateVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if version is not None: + request.version = version + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("version.name", request.version.name),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_version( + self, + request: Optional[Union[version.DeleteVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_version(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_version(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteVersionRequest, dict]): + The request object. The request message for + [Versions.DeleteVersion][google.cloud.dialogflow.cx.v3.Versions.DeleteVersion]. + name (str): + Required. The name of the + [Version][google.cloud.dialogflow.cx.v3.Version] to + delete. Format: + ``projects//locations//agents//flows//versions/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a version.DeleteVersionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, version.DeleteVersionRequest): + request = version.DeleteVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def load_version( + self, + request: Optional[Union[version.LoadVersionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Loads resources in the specified version to the draft flow. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_load_version(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.LoadVersionRequest( + name="name_value", + ) + + # Make the request + operation = client.load_version(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.LoadVersionRequest, dict]): + The request object. The request message for + [Versions.LoadVersion][google.cloud.dialogflow.cx.v3.Versions.LoadVersion]. + name (str): + Required. The + [Version][google.cloud.dialogflow.cx.v3.Version] to be + loaded to draft flow. Format: + ``projects//locations//agents//flows//versions/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a version.LoadVersionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, version.LoadVersionRequest): + request = version.LoadVersionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.load_version] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=struct_pb2.Struct, + ) + + # Done; return the response. 
+ return response + + def compare_versions( + self, + request: Optional[Union[version.CompareVersionsRequest, dict]] = None, + *, + base_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.CompareVersionsResponse: + r"""Compares the specified base version with target + version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_compare_versions(): + # Create a client + client = dialogflowcx_v3.VersionsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.CompareVersionsRequest( + base_version="base_version_value", + target_version="target_version_value", + ) + + # Make the request + response = client.compare_versions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CompareVersionsRequest, dict]): + The request object. The request message for + [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions]. + base_version (str): + Required. Name of the base flow version to compare with + the target version. Use version ID ``0`` to indicate the + draft version of the specified flow. + + Format: + ``projects//locations//agents/ /flows//versions/``. + + This corresponds to the ``base_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.CompareVersionsResponse: + The response message for + [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([base_version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a version.CompareVersionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, version.CompareVersionsRequest): + request = version.CompareVersionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if base_version is not None: + request.base_version = base_version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.compare_versions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("base_version", request.base_version),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "VersionsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VersionsClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/pagers.py new file mode 100644 index 000000000000..30c2c797dfb6 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import version + + +class ListVersionsPager: + """A pager for iterating through ``list_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListVersionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``versions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVersions`` requests and continue to iterate + through the ``versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., version.ListVersionsResponse], + request: version.ListVersionsRequest, + response: version.ListVersionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListVersionsRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListVersionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = version.ListVersionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[version.ListVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[version.Version]: + for page in self.pages: + yield from page.versions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVersionsAsyncPager: + """A pager for iterating through ``list_versions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListVersionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``versions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListVersions`` requests and continue to iterate + through the ``versions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListVersionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[version.ListVersionsResponse]], + request: version.ListVersionsRequest, + response: version.ListVersionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListVersionsRequest): + The initial request object. 
+ response (google.cloud.dialogflowcx_v3.types.ListVersionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = version.ListVersionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[version.ListVersionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[version.Version]: + async def async_generator(): + async for page in self.pages: + for response in page.versions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/__init__.py new file mode 100644 index 000000000000..a4b6bc506d34 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VersionsTransport +from .grpc import VersionsGrpcTransport +from .grpc_asyncio import VersionsGrpcAsyncIOTransport +from .rest import VersionsRestInterceptor, VersionsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[VersionsTransport]] +_transport_registry["grpc"] = VersionsGrpcTransport +_transport_registry["grpc_asyncio"] = VersionsGrpcAsyncIOTransport +_transport_registry["rest"] = VersionsRestTransport + +__all__ = ( + "VersionsTransport", + "VersionsGrpcTransport", + "VersionsGrpcAsyncIOTransport", + "VersionsRestTransport", + "VersionsRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/base.py new file mode 100644 index 000000000000..63601e7bf6f1 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/base.py @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import version +from google.cloud.dialogflowcx_v3.types import version as gcdc_version + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class VersionsTransport(abc.ABC): + """Abstract transport class for Versions.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_versions: gapic_v1.method.wrap_method( + self.list_versions, + default_timeout=None, + client_info=client_info, + ), + self.get_version: gapic_v1.method.wrap_method( + self.get_version, + default_timeout=None, + client_info=client_info, + ), + self.create_version: gapic_v1.method.wrap_method( + self.create_version, + default_timeout=None, + client_info=client_info, + ), + self.update_version: gapic_v1.method.wrap_method( + self.update_version, + default_timeout=None, + client_info=client_info, + ), + self.delete_version: gapic_v1.method.wrap_method( + self.delete_version, + default_timeout=None, + client_info=client_info, + ), + self.load_version: gapic_v1.method.wrap_method( + self.load_version, + default_timeout=None, + client_info=client_info, + ), + self.compare_versions: gapic_v1.method.wrap_method( + self.compare_versions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_versions( + self, + ) -> Callable[ + [version.ListVersionsRequest], + Union[version.ListVersionsResponse, Awaitable[version.ListVersionsResponse]], + ]: + raise NotImplementedError() + + @property + def get_version( + self, + ) -> Callable[ + [version.GetVersionRequest], Union[version.Version, Awaitable[version.Version]] + ]: + raise NotImplementedError() + + @property + def create_version( + self, + ) -> Callable[ + [gcdc_version.CreateVersionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_version( + self, + ) -> Callable[ + [gcdc_version.UpdateVersionRequest], + Union[gcdc_version.Version, Awaitable[gcdc_version.Version]], + ]: + raise NotImplementedError() + + @property + def delete_version( + self, + ) -> Callable[ + [version.DeleteVersionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def load_version( + self, + ) -> Callable[ + [version.LoadVersionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def compare_versions( + self, + ) -> Callable[ + [version.CompareVersionsRequest], + Union[ + version.CompareVersionsResponse, Awaitable[version.CompareVersionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + 
]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("VersionsTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/grpc.py new file mode 100644 index 000000000000..7115a3d8adb0 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/grpc.py @@ -0,0 +1,554 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import version +from google.cloud.dialogflowcx_v3.types import version as gcdc_version + +from .base import DEFAULT_CLIENT_INFO, VersionsTransport + + +class VersionsGrpcTransport(VersionsTransport): + """gRPC backend transport for Versions. + + Service for managing + [Versions][google.cloud.dialogflow.cx.v3.Version]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_versions( + self, + ) -> Callable[[version.ListVersionsRequest], version.ListVersionsResponse]: + r"""Return a callable for the list versions method over gRPC. 
+ + Returns the list of all versions in the specified + [Flow][google.cloud.dialogflow.cx.v3.Flow]. + + Returns: + Callable[[~.ListVersionsRequest], + ~.ListVersionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_versions" not in self._stubs: + self._stubs["list_versions"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/ListVersions", + request_serializer=version.ListVersionsRequest.serialize, + response_deserializer=version.ListVersionsResponse.deserialize, + ) + return self._stubs["list_versions"] + + @property + def get_version(self) -> Callable[[version.GetVersionRequest], version.Version]: + r"""Return a callable for the get version method over gRPC. + + Retrieves the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + Returns: + Callable[[~.GetVersionRequest], + ~.Version]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_version" not in self._stubs: + self._stubs["get_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/GetVersion", + request_serializer=version.GetVersionRequest.serialize, + response_deserializer=version.Version.deserialize, + ) + return self._stubs["get_version"] + + @property + def create_version( + self, + ) -> Callable[[gcdc_version.CreateVersionRequest], operations_pb2.Operation]: + r"""Return a callable for the create version method over gRPC. + + Creates a [Version][google.cloud.dialogflow.cx.v3.Version] in + the specified [Flow][google.cloud.dialogflow.cx.v3.Flow]. 
+ + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [CreateVersionOperationMetadata][google.cloud.dialogflow.cx.v3.CreateVersionOperationMetadata] + - ``response``: + [Version][google.cloud.dialogflow.cx.v3.Version] + + Returns: + Callable[[~.CreateVersionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_version" not in self._stubs: + self._stubs["create_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/CreateVersion", + request_serializer=gcdc_version.CreateVersionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_version"] + + @property + def update_version( + self, + ) -> Callable[[gcdc_version.UpdateVersionRequest], gcdc_version.Version]: + r"""Return a callable for the update version method over gRPC. + + Updates the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + Returns: + Callable[[~.UpdateVersionRequest], + ~.Version]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_version" not in self._stubs: + self._stubs["update_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/UpdateVersion", + request_serializer=gcdc_version.UpdateVersionRequest.serialize, + response_deserializer=gcdc_version.Version.deserialize, + ) + return self._stubs["update_version"] + + @property + def delete_version( + self, + ) -> Callable[[version.DeleteVersionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete version method over gRPC. + + Deletes the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + Returns: + Callable[[~.DeleteVersionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_version" not in self._stubs: + self._stubs["delete_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/DeleteVersion", + request_serializer=version.DeleteVersionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_version"] + + @property + def load_version( + self, + ) -> Callable[[version.LoadVersionRequest], operations_pb2.Operation]: + r"""Return a callable for the load version method over gRPC. + + Loads resources in the specified version to the draft flow. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Returns: + Callable[[~.LoadVersionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_version" not in self._stubs: + self._stubs["load_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/LoadVersion", + request_serializer=version.LoadVersionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_version"] + + @property + def compare_versions( + self, + ) -> Callable[[version.CompareVersionsRequest], version.CompareVersionsResponse]: + r"""Return a callable for the compare versions method over gRPC. + + Compares the specified base version with target + version. + + Returns: + Callable[[~.CompareVersionsRequest], + ~.CompareVersionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "compare_versions" not in self._stubs: + self._stubs["compare_versions"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/CompareVersions", + request_serializer=version.CompareVersionsRequest.serialize, + response_deserializer=version.CompareVersionsResponse.deserialize, + ) + return self._stubs["compare_versions"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("VersionsGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3ef383a9678c --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/grpc_asyncio.py @@ -0,0 +1,563 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import version +from google.cloud.dialogflowcx_v3.types import version as gcdc_version + +from .base import DEFAULT_CLIENT_INFO, VersionsTransport +from .grpc import VersionsGrpcTransport + + +class VersionsGrpcAsyncIOTransport(VersionsTransport): + """gRPC AsyncIO backend transport for Versions. + + Service for managing + [Versions][google.cloud.dialogflow.cx.v3.Version]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_versions( + self, + ) -> Callable[ + [version.ListVersionsRequest], Awaitable[version.ListVersionsResponse] + ]: + r"""Return a callable for the list versions method over gRPC. + + Returns the list of all versions in the specified + [Flow][google.cloud.dialogflow.cx.v3.Flow]. + + Returns: + Callable[[~.ListVersionsRequest], + Awaitable[~.ListVersionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_versions" not in self._stubs: + self._stubs["list_versions"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/ListVersions", + request_serializer=version.ListVersionsRequest.serialize, + response_deserializer=version.ListVersionsResponse.deserialize, + ) + return self._stubs["list_versions"] + + @property + def get_version( + self, + ) -> Callable[[version.GetVersionRequest], Awaitable[version.Version]]: + r"""Return a callable for the get version method over gRPC. + + Retrieves the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + Returns: + Callable[[~.GetVersionRequest], + Awaitable[~.Version]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_version" not in self._stubs: + self._stubs["get_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/GetVersion", + request_serializer=version.GetVersionRequest.serialize, + response_deserializer=version.Version.deserialize, + ) + return self._stubs["get_version"] + + @property + def create_version( + self, + ) -> Callable[ + [gcdc_version.CreateVersionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create version method over gRPC. + + Creates a [Version][google.cloud.dialogflow.cx.v3.Version] in + the specified [Flow][google.cloud.dialogflow.cx.v3.Flow]. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [CreateVersionOperationMetadata][google.cloud.dialogflow.cx.v3.CreateVersionOperationMetadata] + - ``response``: + [Version][google.cloud.dialogflow.cx.v3.Version] + + Returns: + Callable[[~.CreateVersionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_version" not in self._stubs: + self._stubs["create_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/CreateVersion", + request_serializer=gcdc_version.CreateVersionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_version"] + + @property + def update_version( + self, + ) -> Callable[[gcdc_version.UpdateVersionRequest], Awaitable[gcdc_version.Version]]: + r"""Return a callable for the update version method over gRPC. + + Updates the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. 
+ + Returns: + Callable[[~.UpdateVersionRequest], + Awaitable[~.Version]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_version" not in self._stubs: + self._stubs["update_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/UpdateVersion", + request_serializer=gcdc_version.UpdateVersionRequest.serialize, + response_deserializer=gcdc_version.Version.deserialize, + ) + return self._stubs["update_version"] + + @property + def delete_version( + self, + ) -> Callable[[version.DeleteVersionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete version method over gRPC. + + Deletes the specified + [Version][google.cloud.dialogflow.cx.v3.Version]. + + Returns: + Callable[[~.DeleteVersionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_version" not in self._stubs: + self._stubs["delete_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/DeleteVersion", + request_serializer=version.DeleteVersionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_version"] + + @property + def load_version( + self, + ) -> Callable[[version.LoadVersionRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the load version method over gRPC. + + Loads resources in the specified version to the draft flow. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: An empty `Struct + message `__ + - ``response``: An `Empty + message `__ + + Returns: + Callable[[~.LoadVersionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "load_version" not in self._stubs: + self._stubs["load_version"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/LoadVersion", + request_serializer=version.LoadVersionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["load_version"] + + @property + def compare_versions( + self, + ) -> Callable[ + [version.CompareVersionsRequest], Awaitable[version.CompareVersionsResponse] + ]: + r"""Return a callable for the compare versions method over gRPC. + + Compares the specified base version with target + version. + + Returns: + Callable[[~.CompareVersionsRequest], + Awaitable[~.CompareVersionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "compare_versions" not in self._stubs: + self._stubs["compare_versions"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Versions/CompareVersions", + request_serializer=version.CompareVersionsRequest.serialize, + response_deserializer=version.CompareVersionsResponse.deserialize, + ) + return self._stubs["compare_versions"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("VersionsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py new file mode 100644 index 000000000000..99e8089b5e77 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py @@ -0,0 +1,1593 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import version +from google.cloud.dialogflowcx_v3.types import version as gcdc_version + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import VersionsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class VersionsRestInterceptor: + """Interceptor for Versions. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the VersionsRestTransport. + + .. code-block:: python + class MyCustomVersionsInterceptor(VersionsRestInterceptor): + def pre_compare_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_compare_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_versions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_versions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_load_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_load_version(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_version(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_version(self, response): + logging.log(f"Received response: 
{response}") + return response + + transport = VersionsRestTransport(interceptor=MyCustomVersionsInterceptor()) + client = VersionsClient(transport=transport) + + + """ + + def pre_compare_versions( + self, + request: version.CompareVersionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[version.CompareVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for compare_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_compare_versions( + self, response: version.CompareVersionsResponse + ) -> version.CompareVersionsResponse: + """Post-rpc interceptor for compare_versions + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_create_version( + self, + request: gcdc_version.CreateVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_version.CreateVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_create_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_version + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_delete_version( + self, request: version.DeleteVersionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[version.DeleteVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. 
+ """ + return request, metadata + + def pre_get_version( + self, request: version.GetVersionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[version.GetVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_get_version(self, response: version.Version) -> version.Version: + """Post-rpc interceptor for get_version + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_list_versions( + self, request: version.ListVersionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[version.ListVersionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_versions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_list_versions( + self, response: version.ListVersionsResponse + ) -> version.ListVersionsResponse: + """Post-rpc interceptor for list_versions + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_load_version( + self, request: version.LoadVersionRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[version.LoadVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for load_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. 
+ """ + return request, metadata + + def post_load_version( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for load_version + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_update_version( + self, + request: gcdc_version.UpdateVersionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_version.UpdateVersionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_version + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_update_version( + self, response: gcdc_version.Version + ) -> gcdc_version.Version: + """Post-rpc interceptor for update_version + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Versions server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Versions server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class VersionsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: VersionsRestInterceptor + + +class VersionsRestTransport(VersionsTransport): + """REST backend transport for Versions. + + Service for managing + [Versions][google.cloud.dialogflow.cx.v3.Version]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[VersionsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or VersionsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CompareVersions(VersionsRestStub): + def __hash__(self): + return hash("CompareVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: version.CompareVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.CompareVersionsResponse: + r"""Call the compare versions method over HTTP. + + Args: + request (~.version.CompareVersionsRequest): + The request object. The request message for + [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.version.CompareVersionsResponse: + The response message for + [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{base_version=projects/*/locations/*/agents/*/flows/*/versions/*}:compareVersions", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_compare_versions( + request, metadata + ) + pb_request = version.CompareVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = version.CompareVersionsResponse() + pb_resp = version.CompareVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_compare_versions(resp) + return resp + + class _CreateVersion(VersionsRestStub): + def __hash__(self): + return hash("CreateVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_version.CreateVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create version method over HTTP. + + Args: + request (~.gcdc_version.CreateVersionRequest): + The request object. The request message for + [Versions.CreateVersion][google.cloud.dialogflow.cx.v3.Versions.CreateVersion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/flows/*}/versions", + "body": "version", + }, + ] + request, metadata = self._interceptor.pre_create_version(request, metadata) + pb_request = gcdc_version.CreateVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_version(resp) + return resp + + class _DeleteVersion(VersionsRestStub): + def __hash__(self): + return hash("DeleteVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: version.DeleteVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete version method over HTTP. + + Args: + request (~.version.DeleteVersionRequest): + The request object. The request message for + [Versions.DeleteVersion][google.cloud.dialogflow.cx.v3.Versions.DeleteVersion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/versions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_version(request, metadata) + pb_request = version.DeleteVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetVersion(VersionsRestStub): + def __hash__(self): + return hash("GetVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: version.GetVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.Version: + r"""Call the get version method over HTTP. + + Args: + request (~.version.GetVersionRequest): + The request object. 
The request message for + [Versions.GetVersion][google.cloud.dialogflow.cx.v3.Versions.GetVersion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.version.Version: + Represents a version of a flow. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/versions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_version(request, metadata) + pb_request = version.GetVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = version.Version() + pb_resp = version.Version.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_version(resp) + return resp + + class _ListVersions(VersionsRestStub): + def __hash__(self): + return hash("ListVersions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: version.ListVersionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> version.ListVersionsResponse: + r"""Call the list versions method over HTTP. + + Args: + request (~.version.ListVersionsRequest): + The request object. The request message for + [Versions.ListVersions][google.cloud.dialogflow.cx.v3.Versions.ListVersions]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.version.ListVersionsResponse: + The response message for + [Versions.ListVersions][google.cloud.dialogflow.cx.v3.Versions.ListVersions]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*/flows/*}/versions", + }, + ] + request, metadata = self._interceptor.pre_list_versions(request, metadata) + pb_request = version.ListVersionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = version.ListVersionsResponse() + pb_resp = version.ListVersionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_versions(resp) + return resp + + class _LoadVersion(VersionsRestStub): + def __hash__(self): + return hash("LoadVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: version.LoadVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the load version method over HTTP. + + Args: + request (~.version.LoadVersionRequest): + The request object. The request message for + [Versions.LoadVersion][google.cloud.dialogflow.cx.v3.Versions.LoadVersion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/agents/*/flows/*/versions/*}:load", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_load_version(request, metadata) + pb_request = version.LoadVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_load_version(resp) + return resp + + class _UpdateVersion(VersionsRestStub): + def __hash__(self): + return hash("UpdateVersion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_version.UpdateVersionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_version.Version: + r"""Call the update version method over HTTP. + + Args: + request (~.gcdc_version.UpdateVersionRequest): + The request object. The request message for + [Versions.UpdateVersion][google.cloud.dialogflow.cx.v3.Versions.UpdateVersion]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_version.Version: + Represents a version of a flow. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{version.name=projects/*/locations/*/agents/*/flows/*/versions/*}", + "body": "version", + }, + ] + request, metadata = self._interceptor.pre_update_version(request, metadata) + pb_request = gcdc_version.UpdateVersionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_version.Version() + pb_resp = gcdc_version.Version.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_version(resp) + return resp + + @property + def compare_versions( + self, + ) -> Callable[[version.CompareVersionsRequest], version.CompareVersionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CompareVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_version( + self, + ) -> Callable[[gcdc_version.CreateVersionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_version( + self, + ) -> Callable[[version.DeleteVersionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_version(self) -> Callable[[version.GetVersionRequest], version.Version]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_versions( + self, + ) -> Callable[[version.ListVersionsRequest], version.ListVersionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVersions(self._session, self._host, self._interceptor) # type: ignore + + @property + def load_version( + self, + ) -> Callable[[version.LoadVersionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._LoadVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_version( + self, + ) -> Callable[[gcdc_version.UpdateVersionRequest], gcdc_version.Version]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateVersion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(VersionsRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(VersionsRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(VersionsRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(VersionsRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(VersionsRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("VersionsRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/__init__.py new file mode 100644 index 000000000000..de52a38de6bf --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import WebhooksAsyncClient +from .client import WebhooksClient + +__all__ = ( + "WebhooksClient", + "WebhooksAsyncClient", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py new file mode 100644 index 000000000000..12bcfe09a2e5 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/async_client.py @@ -0,0 +1,1056 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.webhooks import pagers +from google.cloud.dialogflowcx_v3.types import webhook +from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook + +from .client import WebhooksClient +from .transports.base import DEFAULT_CLIENT_INFO, WebhooksTransport +from .transports.grpc_asyncio import WebhooksGrpcAsyncIOTransport + + +class WebhooksAsyncClient: + """Service for managing + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook]. 
+ """ + + _client: WebhooksClient + + DEFAULT_ENDPOINT = WebhooksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = WebhooksClient.DEFAULT_MTLS_ENDPOINT + + service_path = staticmethod(WebhooksClient.service_path) + parse_service_path = staticmethod(WebhooksClient.parse_service_path) + webhook_path = staticmethod(WebhooksClient.webhook_path) + parse_webhook_path = staticmethod(WebhooksClient.parse_webhook_path) + common_billing_account_path = staticmethod( + WebhooksClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + WebhooksClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(WebhooksClient.common_folder_path) + parse_common_folder_path = staticmethod(WebhooksClient.parse_common_folder_path) + common_organization_path = staticmethod(WebhooksClient.common_organization_path) + parse_common_organization_path = staticmethod( + WebhooksClient.parse_common_organization_path + ) + common_project_path = staticmethod(WebhooksClient.common_project_path) + parse_common_project_path = staticmethod(WebhooksClient.parse_common_project_path) + common_location_path = staticmethod(WebhooksClient.common_location_path) + parse_common_location_path = staticmethod(WebhooksClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WebhooksAsyncClient: The constructed client. + """ + return WebhooksClient.from_service_account_info.__func__(WebhooksAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WebhooksAsyncClient: The constructed client. + """ + return WebhooksClient.from_service_account_file.__func__(WebhooksAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return WebhooksClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> WebhooksTransport: + """Returns the transport used by the client instance. + + Returns: + WebhooksTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(WebhooksClient).get_transport_class, type(WebhooksClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, WebhooksTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the webhooks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.WebhooksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = WebhooksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_webhooks( + self, + request: Optional[Union[webhook.ListWebhooksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWebhooksAsyncPager: + r"""Returns the list of all webhooks in the specified + agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_list_webhooks(): + # Create a client + client = dialogflowcx_v3.WebhooksAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListWebhooksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_webhooks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ListWebhooksRequest, dict]]): + The request object. The request message for + [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. + parent (:class:`str`): + Required. The agent to list all webhooks for. 
Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.webhooks.pagers.ListWebhooksAsyncPager: + The response message for + [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = webhook.ListWebhooksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_webhooks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListWebhooksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_webhook( + self, + request: Optional[Union[webhook.GetWebhookRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> webhook.Webhook: + r"""Retrieves the specified webhook. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_get_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetWebhookRequest( + name="name_value", + ) + + # Make the request + response = await client.get_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.GetWebhookRequest, dict]]): + The request object. The request message for + [Webhooks.GetWebhook][google.cloud.dialogflow.cx.v3.Webhooks.GetWebhook]. + name (:class:`str`): + Required. The name of the webhook. Format: + ``projects//locations//agents//webhooks/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = webhook.GetWebhookRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_webhook, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_webhook( + self, + request: Optional[Union[gcdc_webhook.CreateWebhookRequest, dict]] = None, + *, + parent: Optional[str] = None, + webhook: Optional[gcdc_webhook.Webhook] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_webhook.Webhook: + r"""Creates a webhook in the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_create_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksAsyncClient() + + # Initialize request argument(s) + webhook = dialogflowcx_v3.Webhook() + webhook.generic_web_service.uri = "uri_value" + webhook.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateWebhookRequest( + parent="parent_value", + webhook=webhook, + ) + + # Make the request + response = await client.create_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.CreateWebhookRequest, dict]]): + The request object. The request message for + [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook]. + parent (:class:`str`): + Required. The agent to create a webhook for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + webhook (:class:`google.cloud.dialogflowcx_v3.types.Webhook`): + Required. The webhook to create. 
+ This corresponds to the ``webhook`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, webhook]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_webhook.CreateWebhookRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if webhook is not None: + request.webhook = webhook + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_webhook, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_webhook( + self, + request: Optional[Union[gcdc_webhook.UpdateWebhookRequest, dict]] = None, + *, + webhook: Optional[gcdc_webhook.Webhook] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_webhook.Webhook: + r"""Updates the specified webhook. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_update_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksAsyncClient() + + # Initialize request argument(s) + webhook = dialogflowcx_v3.Webhook() + webhook.generic_web_service.uri = "uri_value" + webhook.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateWebhookRequest( + webhook=webhook, + ) + + # Make the request + response = await client.update_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.UpdateWebhookRequest, dict]]): + The request object. The request message for + [Webhooks.UpdateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.UpdateWebhook]. + webhook (:class:`google.cloud.dialogflowcx_v3.types.Webhook`): + Required. The webhook to update. + This corresponds to the ``webhook`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The mask to control which fields get + updated. 
If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([webhook, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcdc_webhook.UpdateWebhookRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if webhook is not None: + request.webhook = webhook + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_webhook, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("webhook.name", request.webhook.name),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_webhook( + self, + request: Optional[Union[webhook.DeleteWebhookRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified webhook. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_delete_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteWebhookRequest( + name="name_value", + ) + + # Make the request + await client.delete_webhook(request=request) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.DeleteWebhookRequest, dict]]): + The request object. The request message for + [Webhooks.DeleteWebhook][google.cloud.dialogflow.cx.v3.Webhooks.DeleteWebhook]. + name (:class:`str`): + Required. The name of the webhook to delete. Format: + ``projects//locations//agents//webhooks/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = webhook.DeleteWebhookRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_webhook, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "WebhooksAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WebhooksAsyncClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py new file mode 100644 index 000000000000..e86afe78b127 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py @@ -0,0 +1,1322 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.services.webhooks import pagers +from google.cloud.dialogflowcx_v3.types import webhook +from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook + +from .transports.base import DEFAULT_CLIENT_INFO, WebhooksTransport +from .transports.grpc import WebhooksGrpcTransport +from .transports.grpc_asyncio import WebhooksGrpcAsyncIOTransport +from .transports.rest import WebhooksRestTransport + + +class WebhooksClientMeta(type): + """Metaclass for the Webhooks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[WebhooksTransport]] + _transport_registry["grpc"] = WebhooksGrpcTransport + _transport_registry["grpc_asyncio"] = WebhooksGrpcAsyncIOTransport + _transport_registry["rest"] = WebhooksRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[WebhooksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class WebhooksClient(metaclass=WebhooksClientMeta): + """Service for managing + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dialogflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WebhooksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WebhooksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> WebhooksTransport: + """Returns the transport used by the client instance. + + Returns: + WebhooksTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def service_path( + project: str, + location: str, + namespace: str, + service: str, + ) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/namespaces/{namespace}/services/{service}".format( + project=project, + location=location, + namespace=namespace, + service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/namespaces/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def webhook_path( + project: str, + location: str, + agent: str, + webhook: str, + ) -> str: + """Returns a fully-qualified webhook string.""" + return "projects/{project}/locations/{location}/agents/{agent}/webhooks/{webhook}".format( + project=project, + location=location, + agent=agent, + webhook=webhook, + ) + + @staticmethod + def parse_webhook_path(path: str) -> Dict[str, str]: + """Parses a webhook path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/agents/(?P.+?)/webhooks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, 
str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, WebhooksTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the webhooks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, WebhooksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, WebhooksTransport): + # transport is a WebhooksTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_webhooks( + self, + request: Optional[Union[webhook.ListWebhooksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWebhooksPager: + r"""Returns the list of all webhooks in the specified + agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_list_webhooks(): + # Create a client + client = dialogflowcx_v3.WebhooksClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ListWebhooksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_webhooks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ListWebhooksRequest, dict]): + The request object. The request message for + [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. + parent (str): + Required. The agent to list all webhooks for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.services.webhooks.pagers.ListWebhooksPager: + The response message for + [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a webhook.ListWebhooksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, webhook.ListWebhooksRequest): + request = webhook.ListWebhooksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_webhooks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWebhooksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_webhook( + self, + request: Optional[Union[webhook.GetWebhookRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> webhook.Webhook: + r"""Retrieves the specified webhook. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_get_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.GetWebhookRequest( + name="name_value", + ) + + # Make the request + response = client.get_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.GetWebhookRequest, dict]): + The request object. The request message for + [Webhooks.GetWebhook][google.cloud.dialogflow.cx.v3.Webhooks.GetWebhook]. + name (str): + Required. The name of the webhook. Format: + ``projects//locations//agents//webhooks/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a webhook.GetWebhookRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, webhook.GetWebhookRequest): + request = webhook.GetWebhookRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_webhook] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_webhook( + self, + request: Optional[Union[gcdc_webhook.CreateWebhookRequest, dict]] = None, + *, + parent: Optional[str] = None, + webhook: Optional[gcdc_webhook.Webhook] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_webhook.Webhook: + r"""Creates a webhook in the specified agent. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_create_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksClient() + + # Initialize request argument(s) + webhook = dialogflowcx_v3.Webhook() + webhook.generic_web_service.uri = "uri_value" + webhook.display_name = "display_name_value" + + request = dialogflowcx_v3.CreateWebhookRequest( + parent="parent_value", + webhook=webhook, + ) + + # Make the request + response = client.create_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.CreateWebhookRequest, dict]): + The request object. The request message for + [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook]. + parent (str): + Required. The agent to create a webhook for. Format: + ``projects//locations//agents/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + webhook (google.cloud.dialogflowcx_v3.types.Webhook): + Required. The webhook to create. + This corresponds to the ``webhook`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dialogflowcx_v3.types.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, webhook]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_webhook.CreateWebhookRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_webhook.CreateWebhookRequest): + request = gcdc_webhook.CreateWebhookRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if webhook is not None: + request.webhook = webhook + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_webhook] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_webhook( + self, + request: Optional[Union[gcdc_webhook.UpdateWebhookRequest, dict]] = None, + *, + webhook: Optional[gcdc_webhook.Webhook] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_webhook.Webhook: + r"""Updates the specified webhook. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_update_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksClient() + + # Initialize request argument(s) + webhook = dialogflowcx_v3.Webhook() + webhook.generic_web_service.uri = "uri_value" + webhook.display_name = "display_name_value" + + request = dialogflowcx_v3.UpdateWebhookRequest( + webhook=webhook, + ) + + # Make the request + response = client.update_webhook(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.UpdateWebhookRequest, dict]): + The request object. The request message for + [Webhooks.UpdateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.UpdateWebhook]. + webhook (google.cloud.dialogflowcx_v3.types.Webhook): + Required. The webhook to update. + This corresponds to the ``webhook`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get + updated. If the mask is not present, all + fields will be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dialogflowcx_v3.types.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([webhook, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcdc_webhook.UpdateWebhookRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcdc_webhook.UpdateWebhookRequest): + request = gcdc_webhook.UpdateWebhookRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if webhook is not None: + request.webhook = webhook + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_webhook] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("webhook.name", request.webhook.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_webhook( + self, + request: Optional[Union[webhook.DeleteWebhookRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified webhook. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_delete_webhook(): + # Create a client + client = dialogflowcx_v3.WebhooksClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.DeleteWebhookRequest( + name="name_value", + ) + + # Make the request + client.delete_webhook(request=request) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.DeleteWebhookRequest, dict]): + The request object. The request message for + [Webhooks.DeleteWebhook][google.cloud.dialogflow.cx.v3.Webhooks.DeleteWebhook]. + name (str): + Required. The name of the webhook to delete. Format: + ``projects//locations//agents//webhooks/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a webhook.DeleteWebhookRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, webhook.DeleteWebhookRequest): + request = webhook.DeleteWebhookRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_webhook] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "WebhooksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. 
Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WebhooksClient",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/pagers.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/pagers.py new file mode 100644 index 000000000000..b45855819b0a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dialogflowcx_v3.types import webhook + + +class ListWebhooksPager: + """A pager for iterating through ``list_webhooks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListWebhooksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``webhooks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWebhooks`` requests and continue to iterate + through the ``webhooks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListWebhooksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., webhook.ListWebhooksResponse], + request: webhook.ListWebhooksRequest, + response: webhook.ListWebhooksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListWebhooksRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListWebhooksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = webhook.ListWebhooksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[webhook.ListWebhooksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[webhook.Webhook]: + for page in self.pages: + yield from page.webhooks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWebhooksAsyncPager: + """A pager for iterating through ``list_webhooks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflowcx_v3.types.ListWebhooksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``webhooks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWebhooks`` requests and continue to iterate + through the ``webhooks`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflowcx_v3.types.ListWebhooksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[webhook.ListWebhooksResponse]], + request: webhook.ListWebhooksRequest, + response: webhook.ListWebhooksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflowcx_v3.types.ListWebhooksRequest): + The initial request object. + response (google.cloud.dialogflowcx_v3.types.ListWebhooksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = webhook.ListWebhooksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[webhook.ListWebhooksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[webhook.Webhook]: + async def async_generator(): + async for page in self.pages: + for response in page.webhooks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/__init__.py new file mode 100644 
index 000000000000..76c9491b44c9 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import WebhooksTransport +from .grpc import WebhooksGrpcTransport +from .grpc_asyncio import WebhooksGrpcAsyncIOTransport +from .rest import WebhooksRestInterceptor, WebhooksRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[WebhooksTransport]] +_transport_registry["grpc"] = WebhooksGrpcTransport +_transport_registry["grpc_asyncio"] = WebhooksGrpcAsyncIOTransport +_transport_registry["rest"] = WebhooksRestTransport + +__all__ = ( + "WebhooksTransport", + "WebhooksGrpcTransport", + "WebhooksGrpcAsyncIOTransport", + "WebhooksRestTransport", + "WebhooksRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/base.py new file mode 100644 index 000000000000..3a84b1358154 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/base.py @@ -0,0 +1,265 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3 import gapic_version as package_version +from google.cloud.dialogflowcx_v3.types import webhook +from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class WebhooksTransport(abc.ABC): + """Abstract transport class for Webhooks.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. 
+ self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_webhooks: gapic_v1.method.wrap_method( + self.list_webhooks, + default_timeout=None, + client_info=client_info, + ), + self.get_webhook: gapic_v1.method.wrap_method( + self.get_webhook, + default_timeout=None, + client_info=client_info, + ), + self.create_webhook: gapic_v1.method.wrap_method( + self.create_webhook, + default_timeout=None, + client_info=client_info, + ), + self.update_webhook: gapic_v1.method.wrap_method( + self.update_webhook, + default_timeout=None, + client_info=client_info, + ), + self.delete_webhook: gapic_v1.method.wrap_method( + self.delete_webhook, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_webhooks( + self, + ) -> Callable[ + [webhook.ListWebhooksRequest], + Union[webhook.ListWebhooksResponse, Awaitable[webhook.ListWebhooksResponse]], + ]: + raise NotImplementedError() + + @property + def get_webhook( + self, + ) -> Callable[ + [webhook.GetWebhookRequest], Union[webhook.Webhook, Awaitable[webhook.Webhook]] + ]: + raise NotImplementedError() + + @property + def create_webhook( + self, + ) -> Callable[ + [gcdc_webhook.CreateWebhookRequest], + Union[gcdc_webhook.Webhook, Awaitable[gcdc_webhook.Webhook]], + ]: + raise NotImplementedError() + + @property + def update_webhook( + self, + ) -> Callable[ + [gcdc_webhook.UpdateWebhookRequest], + Union[gcdc_webhook.Webhook, Awaitable[gcdc_webhook.Webhook]], + ]: + raise NotImplementedError() + + @property + def delete_webhook( + self, + ) -> Callable[ + [webhook.DeleteWebhookRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], 
+ ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("WebhooksTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/grpc.py new file mode 100644 index 000000000000..8ae14b269588 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/grpc.py @@ -0,0 +1,462 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dialogflowcx_v3.types import webhook +from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook + +from .base import DEFAULT_CLIENT_INFO, WebhooksTransport + + +class WebhooksGrpcTransport(WebhooksTransport): + """gRPC backend transport for Webhooks. 
+ + Service for managing + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_webhooks( + self, + ) -> Callable[[webhook.ListWebhooksRequest], webhook.ListWebhooksResponse]: + r"""Return a callable for the list webhooks method over gRPC. + + Returns the list of all webhooks in the specified + agent. + + Returns: + Callable[[~.ListWebhooksRequest], + ~.ListWebhooksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_webhooks" not in self._stubs: + self._stubs["list_webhooks"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/ListWebhooks", + request_serializer=webhook.ListWebhooksRequest.serialize, + response_deserializer=webhook.ListWebhooksResponse.deserialize, + ) + return self._stubs["list_webhooks"] + + @property + def get_webhook(self) -> Callable[[webhook.GetWebhookRequest], webhook.Webhook]: + r"""Return a callable for the get webhook method over gRPC. + + Retrieves the specified webhook. + + Returns: + Callable[[~.GetWebhookRequest], + ~.Webhook]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_webhook" not in self._stubs: + self._stubs["get_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/GetWebhook", + request_serializer=webhook.GetWebhookRequest.serialize, + response_deserializer=webhook.Webhook.deserialize, + ) + return self._stubs["get_webhook"] + + @property + def create_webhook( + self, + ) -> Callable[[gcdc_webhook.CreateWebhookRequest], gcdc_webhook.Webhook]: + r"""Return a callable for the create webhook method over gRPC. + + Creates a webhook in the specified agent. + + Returns: + Callable[[~.CreateWebhookRequest], + ~.Webhook]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_webhook" not in self._stubs: + self._stubs["create_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/CreateWebhook", + request_serializer=gcdc_webhook.CreateWebhookRequest.serialize, + response_deserializer=gcdc_webhook.Webhook.deserialize, + ) + return self._stubs["create_webhook"] + + @property + def update_webhook( + self, + ) -> Callable[[gcdc_webhook.UpdateWebhookRequest], gcdc_webhook.Webhook]: + r"""Return a callable for the update webhook method over gRPC. + + Updates the specified webhook. + + Returns: + Callable[[~.UpdateWebhookRequest], + ~.Webhook]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_webhook" not in self._stubs: + self._stubs["update_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/UpdateWebhook", + request_serializer=gcdc_webhook.UpdateWebhookRequest.serialize, + response_deserializer=gcdc_webhook.Webhook.deserialize, + ) + return self._stubs["update_webhook"] + + @property + def delete_webhook( + self, + ) -> Callable[[webhook.DeleteWebhookRequest], empty_pb2.Empty]: + r"""Return a callable for the delete webhook method over gRPC. + + Deletes the specified webhook. + + Returns: + Callable[[~.DeleteWebhookRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_webhook" not in self._stubs: + self._stubs["delete_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/DeleteWebhook", + request_serializer=webhook.DeleteWebhookRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_webhook"] + + def close(self): + self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("WebhooksGrpcTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/grpc_asyncio.py new file mode 100644 index 000000000000..724aa0b5830e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/grpc_asyncio.py @@ -0,0 +1,465 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dialogflowcx_v3.types import webhook +from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook + +from .base import DEFAULT_CLIENT_INFO, WebhooksTransport +from .grpc import WebhooksGrpcTransport + + +class WebhooksGrpcAsyncIOTransport(WebhooksTransport): + """gRPC AsyncIO backend transport for Webhooks. + + Service for managing + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_webhooks( + self, + ) -> Callable[ + [webhook.ListWebhooksRequest], Awaitable[webhook.ListWebhooksResponse] + ]: + r"""Return a callable for the list webhooks method over gRPC. + + Returns the list of all webhooks in the specified + agent. 
+ + Returns: + Callable[[~.ListWebhooksRequest], + Awaitable[~.ListWebhooksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_webhooks" not in self._stubs: + self._stubs["list_webhooks"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/ListWebhooks", + request_serializer=webhook.ListWebhooksRequest.serialize, + response_deserializer=webhook.ListWebhooksResponse.deserialize, + ) + return self._stubs["list_webhooks"] + + @property + def get_webhook( + self, + ) -> Callable[[webhook.GetWebhookRequest], Awaitable[webhook.Webhook]]: + r"""Return a callable for the get webhook method over gRPC. + + Retrieves the specified webhook. + + Returns: + Callable[[~.GetWebhookRequest], + Awaitable[~.Webhook]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_webhook" not in self._stubs: + self._stubs["get_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/GetWebhook", + request_serializer=webhook.GetWebhookRequest.serialize, + response_deserializer=webhook.Webhook.deserialize, + ) + return self._stubs["get_webhook"] + + @property + def create_webhook( + self, + ) -> Callable[[gcdc_webhook.CreateWebhookRequest], Awaitable[gcdc_webhook.Webhook]]: + r"""Return a callable for the create webhook method over gRPC. + + Creates a webhook in the specified agent. + + Returns: + Callable[[~.CreateWebhookRequest], + Awaitable[~.Webhook]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_webhook" not in self._stubs: + self._stubs["create_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/CreateWebhook", + request_serializer=gcdc_webhook.CreateWebhookRequest.serialize, + response_deserializer=gcdc_webhook.Webhook.deserialize, + ) + return self._stubs["create_webhook"] + + @property + def update_webhook( + self, + ) -> Callable[[gcdc_webhook.UpdateWebhookRequest], Awaitable[gcdc_webhook.Webhook]]: + r"""Return a callable for the update webhook method over gRPC. + + Updates the specified webhook. + + Returns: + Callable[[~.UpdateWebhookRequest], + Awaitable[~.Webhook]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_webhook" not in self._stubs: + self._stubs["update_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/UpdateWebhook", + request_serializer=gcdc_webhook.UpdateWebhookRequest.serialize, + response_deserializer=gcdc_webhook.Webhook.deserialize, + ) + return self._stubs["update_webhook"] + + @property + def delete_webhook( + self, + ) -> Callable[[webhook.DeleteWebhookRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete webhook method over gRPC. + + Deletes the specified webhook. + + Returns: + Callable[[~.DeleteWebhookRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_webhook" not in self._stubs: + self._stubs["delete_webhook"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Webhooks/DeleteWebhook", + request_serializer=webhook.DeleteWebhookRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_webhook"] + + def close(self): + return self.grpc_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("WebhooksGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py new file mode 100644 index 000000000000..67350928b892 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py @@ -0,0 +1,1273 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dialogflowcx_v3.types import webhook +from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import WebhooksTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class WebhooksRestInterceptor: + """Interceptor for Webhooks. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the WebhooksRestTransport. + + .. code-block:: python + class MyCustomWebhooksInterceptor(WebhooksRestInterceptor): + def pre_create_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_webhook(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_webhook(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_webhooks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_webhooks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_webhook(self, response): + logging.log(f"Received response: {response}") + return response + + transport = WebhooksRestTransport(interceptor=MyCustomWebhooksInterceptor()) + client = WebhooksClient(transport=transport) + + + """ + + def pre_create_webhook( + self, + request: gcdc_webhook.CreateWebhookRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_webhook.CreateWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_webhook + + Override in a subclass to manipulate the request or metadata + 
before they are sent to the Webhooks server. + """ + return request, metadata + + def post_create_webhook( + self, response: gcdc_webhook.Webhook + ) -> gcdc_webhook.Webhook: + """Post-rpc interceptor for create_webhook + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_delete_webhook( + self, request: webhook.DeleteWebhookRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[webhook.DeleteWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_webhook + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def pre_get_webhook( + self, request: webhook.GetWebhookRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[webhook.GetWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_webhook + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def post_get_webhook(self, response: webhook.Webhook) -> webhook.Webhook: + """Post-rpc interceptor for get_webhook + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_list_webhooks( + self, request: webhook.ListWebhooksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[webhook.ListWebhooksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_webhooks + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. 
+ """ + return request, metadata + + def post_list_webhooks( + self, response: webhook.ListWebhooksResponse + ) -> webhook.ListWebhooksResponse: + """Post-rpc interceptor for list_webhooks + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_update_webhook( + self, + request: gcdc_webhook.UpdateWebhookRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcdc_webhook.UpdateWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_webhook + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def post_update_webhook( + self, response: gcdc_webhook.Webhook + ) -> gcdc_webhook.Webhook: + """Post-rpc interceptor for update_webhook + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Webhooks server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Webhooks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class WebhooksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: WebhooksRestInterceptor + + +class WebhooksRestTransport(WebhooksTransport): + """REST backend transport for Webhooks. + + Service for managing + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[WebhooksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or WebhooksRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateWebhook(WebhooksRestStub): + def __hash__(self): + return hash("CreateWebhook") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_webhook.CreateWebhookRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_webhook.Webhook: + r"""Call the create webhook method over HTTP. + + Args: + request (~.gcdc_webhook.CreateWebhookRequest): + The request object. 
The request message for + [Webhooks.CreateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.CreateWebhook]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_webhook.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/webhooks", + "body": "webhook", + }, + ] + request, metadata = self._interceptor.pre_create_webhook(request, metadata) + pb_request = gcdc_webhook.CreateWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_webhook.Webhook() + pb_resp = gcdc_webhook.Webhook.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_webhook(resp) + return resp + + class _DeleteWebhook(WebhooksRestStub): + def __hash__(self): + return hash("DeleteWebhook") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: webhook.DeleteWebhookRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete webhook method over HTTP. + + Args: + request (~.webhook.DeleteWebhookRequest): + The request object. The request message for + [Webhooks.DeleteWebhook][google.cloud.dialogflow.cx.v3.Webhooks.DeleteWebhook]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3/{name=projects/*/locations/*/agents/*/webhooks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_webhook(request, metadata) + pb_request = webhook.DeleteWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetWebhook(WebhooksRestStub): + def __hash__(self): + return hash("GetWebhook") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: webhook.GetWebhookRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> webhook.Webhook: + r"""Call the get webhook method over HTTP. + + Args: + request (~.webhook.GetWebhookRequest): + The request object. 
The request message for + [Webhooks.GetWebhook][google.cloud.dialogflow.cx.v3.Webhooks.GetWebhook]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.webhook.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/agents/*/webhooks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_webhook(request, metadata) + pb_request = webhook.GetWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = webhook.Webhook() + pb_resp = webhook.Webhook.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_webhook(resp) + return resp + + class _ListWebhooks(WebhooksRestStub): + def __hash__(self): + return hash("ListWebhooks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: webhook.ListWebhooksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> webhook.ListWebhooksResponse: + r"""Call the list webhooks method over HTTP. + + Args: + request (~.webhook.ListWebhooksRequest): + The request object. The request message for + [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.webhook.ListWebhooksResponse: + The response message for + [Webhooks.ListWebhooks][google.cloud.dialogflow.cx.v3.Webhooks.ListWebhooks]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/webhooks", + }, + ] + request, metadata = self._interceptor.pre_list_webhooks(request, metadata) + pb_request = webhook.ListWebhooksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = webhook.ListWebhooksResponse() + pb_resp = webhook.ListWebhooksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_webhooks(resp) + return resp + + class _UpdateWebhook(WebhooksRestStub): + def __hash__(self): + return hash("UpdateWebhook") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcdc_webhook.UpdateWebhookRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcdc_webhook.Webhook: + r"""Call the update webhook method over HTTP. + + Args: + request (~.gcdc_webhook.UpdateWebhookRequest): + The request object. The request message for + [Webhooks.UpdateWebhook][google.cloud.dialogflow.cx.v3.Webhooks.UpdateWebhook]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcdc_webhook.Webhook: + Webhooks host the developer's + business logic. During a session, + webhooks allow the developer to use the + data extracted by Dialogflow's natural + language processing to generate dynamic + responses, validate collected data, or + trigger actions on the backend. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3/{webhook.name=projects/*/locations/*/agents/*/webhooks/*}", + "body": "webhook", + }, + ] + request, metadata = self._interceptor.pre_update_webhook(request, metadata) + pb_request = gcdc_webhook.UpdateWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcdc_webhook.Webhook() + pb_resp = gcdc_webhook.Webhook.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_webhook(resp) + return resp + + @property + def create_webhook( + self, + ) -> Callable[[gcdc_webhook.CreateWebhookRequest], gcdc_webhook.Webhook]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_webhook( + self, + ) -> Callable[[webhook.DeleteWebhookRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_webhook(self) -> Callable[[webhook.GetWebhookRequest], webhook.Webhook]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_webhooks( + self, + ) -> Callable[[webhook.ListWebhooksRequest], webhook.ListWebhooksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWebhooks(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_webhook( + self, + ) -> Callable[[gcdc_webhook.UpdateWebhookRequest], gcdc_webhook.Webhook]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(WebhooksRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(WebhooksRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(WebhooksRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(WebhooksRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(WebhooksRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("WebhooksRestTransport",) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py new file mode 100644 index 000000000000..b72780f5f9f7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py @@ -0,0 +1,481 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .advanced_settings import AdvancedSettings +from .agent import ( + Agent, + AgentValidationResult, + CreateAgentRequest, + DeleteAgentRequest, + ExportAgentRequest, + ExportAgentResponse, + GetAgentRequest, + GetAgentValidationResultRequest, + GetGenerativeSettingsRequest, + ListAgentsRequest, + ListAgentsResponse, + RestoreAgentRequest, + SpeechToTextSettings, + UpdateAgentRequest, + UpdateGenerativeSettingsRequest, + ValidateAgentRequest, +) +from .audio_config import ( + AudioEncoding, + InputAudioConfig, + OutputAudioConfig, + OutputAudioEncoding, + SpeechModelVariant, + SpeechWordInfo, + SsmlVoiceGender, + SynthesizeSpeechConfig, + TextToSpeechSettings, + VoiceSelectionParams, +) +from .changelog import ( + Changelog, + GetChangelogRequest, + ListChangelogsRequest, + ListChangelogsResponse, +) +from .data_store_connection import DataStoreConnection, DataStoreType +from .deployment import ( + Deployment, + GetDeploymentRequest, + ListDeploymentsRequest, + ListDeploymentsResponse, +) +from .entity_type import ( + CreateEntityTypeRequest, + DeleteEntityTypeRequest, + EntityType, + GetEntityTypeRequest, + ListEntityTypesRequest, + ListEntityTypesResponse, + UpdateEntityTypeRequest, +) +from .environment import ( + ContinuousTestResult, + CreateEnvironmentRequest, + DeleteEnvironmentRequest, + DeployFlowMetadata, + DeployFlowRequest, + DeployFlowResponse, + Environment, + GetEnvironmentRequest, + ListContinuousTestResultsRequest, + ListContinuousTestResultsResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, + LookupEnvironmentHistoryRequest, + LookupEnvironmentHistoryResponse, + RunContinuousTestMetadata, + RunContinuousTestRequest, + RunContinuousTestResponse, + UpdateEnvironmentRequest, +) +from .experiment import ( + CreateExperimentRequest, + DeleteExperimentRequest, + Experiment, + GetExperimentRequest, + ListExperimentsRequest, + ListExperimentsResponse, + RolloutConfig, + RolloutState, + StartExperimentRequest, + StopExperimentRequest, 
+ UpdateExperimentRequest, + VariantsHistory, + VersionVariants, +) +from .flow import ( + CreateFlowRequest, + DeleteFlowRequest, + ExportFlowRequest, + ExportFlowResponse, + Flow, + FlowImportStrategy, + FlowValidationResult, + GetFlowRequest, + GetFlowValidationResultRequest, + ImportFlowRequest, + ImportFlowResponse, + ListFlowsRequest, + ListFlowsResponse, + NluSettings, + TrainFlowRequest, + UpdateFlowRequest, + ValidateFlowRequest, +) +from .fulfillment import Fulfillment +from .gcs import GcsDestination +from .generative_settings import GenerativeSettings +from .intent import ( + CreateIntentRequest, + DeleteIntentRequest, + GetIntentRequest, + Intent, + IntentView, + ListIntentsRequest, + ListIntentsResponse, + UpdateIntentRequest, +) +from .page import ( + CreatePageRequest, + DeletePageRequest, + EventHandler, + Form, + GetPageRequest, + KnowledgeConnectorSettings, + ListPagesRequest, + ListPagesResponse, + Page, + TransitionRoute, + UpdatePageRequest, +) +from .response_message import ResponseMessage +from .safety_settings import SafetySettings +from .security_settings import ( + CreateSecuritySettingsRequest, + DeleteSecuritySettingsRequest, + GetSecuritySettingsRequest, + ListSecuritySettingsRequest, + ListSecuritySettingsResponse, + SecuritySettings, + UpdateSecuritySettingsRequest, +) +from .session import ( + AudioInput, + CloudConversationDebuggingInfo, + DetectIntentRequest, + DetectIntentResponse, + DtmfInput, + EventInput, + FulfillIntentRequest, + FulfillIntentResponse, + IntentInput, + Match, + MatchIntentRequest, + MatchIntentResponse, + QueryInput, + QueryParameters, + QueryResult, + SentimentAnalysisResult, + StreamingDetectIntentRequest, + StreamingDetectIntentResponse, + StreamingRecognitionResult, + TextInput, +) +from .session_entity_type import ( + CreateSessionEntityTypeRequest, + DeleteSessionEntityTypeRequest, + GetSessionEntityTypeRequest, + ListSessionEntityTypesRequest, + ListSessionEntityTypesResponse, + SessionEntityType, + 
UpdateSessionEntityTypeRequest, +) +from .test_case import ( + BatchDeleteTestCasesRequest, + BatchRunTestCasesMetadata, + BatchRunTestCasesRequest, + BatchRunTestCasesResponse, + CalculateCoverageRequest, + CalculateCoverageResponse, + ConversationTurn, + CreateTestCaseRequest, + ExportTestCasesMetadata, + ExportTestCasesRequest, + ExportTestCasesResponse, + GetTestCaseRequest, + GetTestCaseResultRequest, + ImportTestCasesMetadata, + ImportTestCasesRequest, + ImportTestCasesResponse, + IntentCoverage, + ListTestCaseResultsRequest, + ListTestCaseResultsResponse, + ListTestCasesRequest, + ListTestCasesResponse, + RunTestCaseMetadata, + RunTestCaseRequest, + RunTestCaseResponse, + TestCase, + TestCaseError, + TestCaseResult, + TestConfig, + TestError, + TestResult, + TestRunDifference, + TransitionCoverage, + TransitionRouteGroupCoverage, + UpdateTestCaseRequest, +) +from .transition_route_group import ( + CreateTransitionRouteGroupRequest, + DeleteTransitionRouteGroupRequest, + GetTransitionRouteGroupRequest, + ListTransitionRouteGroupsRequest, + ListTransitionRouteGroupsResponse, + TransitionRouteGroup, + UpdateTransitionRouteGroupRequest, +) +from .validation_message import ResourceName, ValidationMessage +from .version import ( + CompareVersionsRequest, + CompareVersionsResponse, + CreateVersionOperationMetadata, + CreateVersionRequest, + DeleteVersionRequest, + GetVersionRequest, + ListVersionsRequest, + ListVersionsResponse, + LoadVersionRequest, + UpdateVersionRequest, + Version, +) +from .webhook import ( + CreateWebhookRequest, + DeleteWebhookRequest, + GetWebhookRequest, + ListWebhooksRequest, + ListWebhooksResponse, + PageInfo, + SessionInfo, + UpdateWebhookRequest, + Webhook, + WebhookRequest, + WebhookResponse, +) + +__all__ = ( + "AdvancedSettings", + "Agent", + "AgentValidationResult", + "CreateAgentRequest", + "DeleteAgentRequest", + "ExportAgentRequest", + "ExportAgentResponse", + "GetAgentRequest", + "GetAgentValidationResultRequest", + 
"GetGenerativeSettingsRequest", + "ListAgentsRequest", + "ListAgentsResponse", + "RestoreAgentRequest", + "SpeechToTextSettings", + "UpdateAgentRequest", + "UpdateGenerativeSettingsRequest", + "ValidateAgentRequest", + "InputAudioConfig", + "OutputAudioConfig", + "SpeechWordInfo", + "SynthesizeSpeechConfig", + "TextToSpeechSettings", + "VoiceSelectionParams", + "AudioEncoding", + "OutputAudioEncoding", + "SpeechModelVariant", + "SsmlVoiceGender", + "Changelog", + "GetChangelogRequest", + "ListChangelogsRequest", + "ListChangelogsResponse", + "DataStoreConnection", + "DataStoreType", + "Deployment", + "GetDeploymentRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "CreateEntityTypeRequest", + "DeleteEntityTypeRequest", + "EntityType", + "GetEntityTypeRequest", + "ListEntityTypesRequest", + "ListEntityTypesResponse", + "UpdateEntityTypeRequest", + "ContinuousTestResult", + "CreateEnvironmentRequest", + "DeleteEnvironmentRequest", + "DeployFlowMetadata", + "DeployFlowRequest", + "DeployFlowResponse", + "Environment", + "GetEnvironmentRequest", + "ListContinuousTestResultsRequest", + "ListContinuousTestResultsResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "LookupEnvironmentHistoryRequest", + "LookupEnvironmentHistoryResponse", + "RunContinuousTestMetadata", + "RunContinuousTestRequest", + "RunContinuousTestResponse", + "UpdateEnvironmentRequest", + "CreateExperimentRequest", + "DeleteExperimentRequest", + "Experiment", + "GetExperimentRequest", + "ListExperimentsRequest", + "ListExperimentsResponse", + "RolloutConfig", + "RolloutState", + "StartExperimentRequest", + "StopExperimentRequest", + "UpdateExperimentRequest", + "VariantsHistory", + "VersionVariants", + "CreateFlowRequest", + "DeleteFlowRequest", + "ExportFlowRequest", + "ExportFlowResponse", + "Flow", + "FlowImportStrategy", + "FlowValidationResult", + "GetFlowRequest", + "GetFlowValidationResultRequest", + "ImportFlowRequest", + "ImportFlowResponse", + 
"ListFlowsRequest", + "ListFlowsResponse", + "NluSettings", + "TrainFlowRequest", + "UpdateFlowRequest", + "ValidateFlowRequest", + "Fulfillment", + "GcsDestination", + "GenerativeSettings", + "ImportStrategy", + "CreateIntentRequest", + "DeleteIntentRequest", + "GetIntentRequest", + "Intent", + "ListIntentsRequest", + "ListIntentsResponse", + "UpdateIntentRequest", + "IntentView", + "CreatePageRequest", + "DeletePageRequest", + "EventHandler", + "Form", + "GetPageRequest", + "KnowledgeConnectorSettings", + "ListPagesRequest", + "ListPagesResponse", + "Page", + "TransitionRoute", + "UpdatePageRequest", + "ResponseMessage", + "SafetySettings", + "CreateSecuritySettingsRequest", + "DeleteSecuritySettingsRequest", + "GetSecuritySettingsRequest", + "ListSecuritySettingsRequest", + "ListSecuritySettingsResponse", + "SecuritySettings", + "UpdateSecuritySettingsRequest", + "AudioInput", + "CloudConversationDebuggingInfo", + "DetectIntentRequest", + "DetectIntentResponse", + "DtmfInput", + "EventInput", + "FulfillIntentRequest", + "FulfillIntentResponse", + "IntentInput", + "Match", + "MatchIntentRequest", + "MatchIntentResponse", + "QueryInput", + "QueryParameters", + "QueryResult", + "SentimentAnalysisResult", + "StreamingDetectIntentRequest", + "StreamingDetectIntentResponse", + "StreamingRecognitionResult", + "TextInput", + "CreateSessionEntityTypeRequest", + "DeleteSessionEntityTypeRequest", + "GetSessionEntityTypeRequest", + "ListSessionEntityTypesRequest", + "ListSessionEntityTypesResponse", + "SessionEntityType", + "UpdateSessionEntityTypeRequest", + "BatchDeleteTestCasesRequest", + "BatchRunTestCasesMetadata", + "BatchRunTestCasesRequest", + "BatchRunTestCasesResponse", + "CalculateCoverageRequest", + "CalculateCoverageResponse", + "ConversationTurn", + "CreateTestCaseRequest", + "ExportTestCasesMetadata", + "ExportTestCasesRequest", + "ExportTestCasesResponse", + "GetTestCaseRequest", + "GetTestCaseResultRequest", + "ImportTestCasesMetadata", + 
"ImportTestCasesRequest", + "ImportTestCasesResponse", + "IntentCoverage", + "ListTestCaseResultsRequest", + "ListTestCaseResultsResponse", + "ListTestCasesRequest", + "ListTestCasesResponse", + "RunTestCaseMetadata", + "RunTestCaseRequest", + "RunTestCaseResponse", + "TestCase", + "TestCaseError", + "TestCaseResult", + "TestConfig", + "TestError", + "TestRunDifference", + "TransitionCoverage", + "TransitionRouteGroupCoverage", + "UpdateTestCaseRequest", + "TestResult", + "CreateTransitionRouteGroupRequest", + "DeleteTransitionRouteGroupRequest", + "GetTransitionRouteGroupRequest", + "ListTransitionRouteGroupsRequest", + "ListTransitionRouteGroupsResponse", + "TransitionRouteGroup", + "UpdateTransitionRouteGroupRequest", + "ResourceName", + "ValidationMessage", + "CompareVersionsRequest", + "CompareVersionsResponse", + "CreateVersionOperationMetadata", + "CreateVersionRequest", + "DeleteVersionRequest", + "GetVersionRequest", + "ListVersionsRequest", + "ListVersionsResponse", + "LoadVersionRequest", + "UpdateVersionRequest", + "Version", + "CreateWebhookRequest", + "DeleteWebhookRequest", + "GetWebhookRequest", + "ListWebhooksRequest", + "ListWebhooksResponse", + "PageInfo", + "SessionInfo", + "UpdateWebhookRequest", + "Webhook", + "WebhookRequest", + "WebhookResponse", +) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py new file mode 100644 index 000000000000..83e5ca206895 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import gcs + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "AdvancedSettings", + }, +) + + +class AdvancedSettings(proto.Message): + r"""Hierarchical advanced settings for + agent/flow/page/fulfillment/parameter. Settings exposed at lower + level overrides the settings exposed at higher level. Overriding + occurs at the sub-setting level. For example, the + playback_interruption_settings at fulfillment level only overrides + the playback_interruption_settings at the agent level, leaving other + settings at the agent level unchanged. + + DTMF settings does not override each other. DTMF settings set at + different levels define DTMF detections running in parallel. + + Hierarchy: Agent->Flow->Page->Fulfillment/Parameter. + + Attributes: + audio_export_gcs_destination (google.cloud.dialogflowcx_v3.types.GcsDestination): + If present, incoming audio is exported by + Dialogflow to the configured Google Cloud + Storage destination. Exposed at the following + levels: + + - Agent level + - Flow level + logging_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings.LoggingSettings): + Settings for logging. + Settings for Dialogflow History, Contact Center + messages, StackDriver logs, and speech logging. + Exposed at the following levels: + + - Agent level. + """ + + class LoggingSettings(proto.Message): + r"""Define behaviors on logging. 
+ + Attributes: + enable_stackdriver_logging (bool): + If true, StackDriver logging is currently + enabled. + enable_interaction_logging (bool): + If true, DF Interaction logging is currently + enabled. + """ + + enable_stackdriver_logging: bool = proto.Field( + proto.BOOL, + number=2, + ) + enable_interaction_logging: bool = proto.Field( + proto.BOOL, + number=3, + ) + + audio_export_gcs_destination: gcs.GcsDestination = proto.Field( + proto.MESSAGE, + number=2, + message=gcs.GcsDestination, + ) + logging_settings: LoggingSettings = proto.Field( + proto.MESSAGE, + number=6, + message=LoggingSettings, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py new file mode 100644 index 000000000000..96c1e740980a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/agent.py @@ -0,0 +1,824 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings as gcdc_advanced_settings, +) +from google.cloud.dialogflowcx_v3.types import ( + generative_settings as gcdc_generative_settings, +) +from google.cloud.dialogflowcx_v3.types import audio_config, flow + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "SpeechToTextSettings", + "Agent", + "ListAgentsRequest", + "ListAgentsResponse", + "GetAgentRequest", + "CreateAgentRequest", + "UpdateAgentRequest", + "DeleteAgentRequest", + "ExportAgentRequest", + "ExportAgentResponse", + "RestoreAgentRequest", + "ValidateAgentRequest", + "GetAgentValidationResultRequest", + "AgentValidationResult", + "GetGenerativeSettingsRequest", + "UpdateGenerativeSettingsRequest", + }, +) + + +class SpeechToTextSettings(proto.Message): + r"""Settings related to speech recognition. + + Attributes: + enable_speech_adaptation (bool): + Whether to use speech adaptation for speech + recognition. + """ + + enable_speech_adaptation: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class Agent(proto.Message): + r"""Agents are best described as Natural Language Understanding (NLU) + modules that transform user requests into actionable data. You can + include agents in your app, product, or service to determine user + intent and respond to the user in a natural way. + + After you create an agent, you can add + [Intents][google.cloud.dialogflow.cx.v3.Intent], [Entity + Types][google.cloud.dialogflow.cx.v3.EntityType], + [Flows][google.cloud.dialogflow.cx.v3.Flow], + [Fulfillments][google.cloud.dialogflow.cx.v3.Fulfillment], + [Webhooks][google.cloud.dialogflow.cx.v3.Webhook], + [TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + and so on to manage the conversation flows. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The unique identifier of the agent. Required for the + [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent] + method. + [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent] + populates the name automatically. Format: + ``projects//locations//agents/``. + display_name (str): + Required. The human-readable name of the + agent, unique within the location. + default_language_code (str): + Required. Immutable. The default language of the agent as a + language tag. See `Language + Support `__ + for a list of the currently supported language codes. This + field cannot be set by the + [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent] + method. + supported_language_codes (MutableSequence[str]): + The list of all languages supported by the agent (except for + the ``default_language_code``). + time_zone (str): + Required. The time zone of the agent from the `time zone + database `__, e.g., + America/New_York, Europe/Paris. + description (str): + The description of the agent. The maximum + length is 500 characters. If exceeded, the + request is rejected. + avatar_uri (str): + The URI of the agent's avatar. Avatars are used throughout + the Dialogflow console and in the self-hosted `Web + Demo `__ + integration. + speech_to_text_settings (google.cloud.dialogflowcx_v3.types.SpeechToTextSettings): + Speech recognition related settings. + start_flow (str): + Immutable. Name of the start flow in this agent. A start + flow will be automatically created when the agent is + created, and can only be deleted by deleting the agent. + Format: + ``projects//locations//agents//flows/``. + security_settings (str): + Name of the + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + reference for the agent. Format: + ``projects//locations//securitySettings/``. 
+ enable_stackdriver_logging (bool): + Indicates if stackdriver logging is enabled for the agent. + Please use + [agent.advanced_settings][google.cloud.dialogflow.cx.v3.AdvancedSettings.LoggingSettings] + instead. + enable_spell_correction (bool): + Indicates if automatic spell correction is + enabled in detect intent requests. + locked (bool): + Indicates whether the agent is locked for changes. If the + agent is locked, modifications to the agent will be rejected + except for [RestoreAgent][]. + advanced_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings): + Hierarchical advanced settings for this + agent. The settings exposed at the lower level + overrides the settings exposed at the higher + level. + git_integration_settings (google.cloud.dialogflowcx_v3.types.Agent.GitIntegrationSettings): + Git integration settings for this agent. + text_to_speech_settings (google.cloud.dialogflowcx_v3.types.TextToSpeechSettings): + Settings on instructing the speech + synthesizer on how to generate the output audio + content. + gen_app_builder_settings (google.cloud.dialogflowcx_v3.types.Agent.GenAppBuilderSettings): + Gen App Builder-related agent-level settings. + + This field is a member of `oneof`_ ``_gen_app_builder_settings``. + """ + + class GitIntegrationSettings(proto.Message): + r"""Settings for connecting to Git repository for an agent. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + github_settings (google.cloud.dialogflowcx_v3.types.Agent.GitIntegrationSettings.GithubSettings): + GitHub settings. + + This field is a member of `oneof`_ ``git_settings``. + """ + + class GithubSettings(proto.Message): + r"""Settings of integration with GitHub. + + Attributes: + display_name (str): + The unique repository display name for the + GitHub repository. + repository_uri (str): + The GitHub repository URI related to the + agent. 
+ tracking_branch (str): + The branch of the GitHub repository tracked + for this agent. + access_token (str): + The access token used to authenticate the + access to the GitHub repository. + branches (MutableSequence[str]): + A list of branches configured to be used from + Dialogflow. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + repository_uri: str = proto.Field( + proto.STRING, + number=2, + ) + tracking_branch: str = proto.Field( + proto.STRING, + number=3, + ) + access_token: str = proto.Field( + proto.STRING, + number=4, + ) + branches: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + github_settings: "Agent.GitIntegrationSettings.GithubSettings" = proto.Field( + proto.MESSAGE, + number=1, + oneof="git_settings", + message="Agent.GitIntegrationSettings.GithubSettings", + ) + + class GenAppBuilderSettings(proto.Message): + r"""Settings for Gen App Builder. + + Attributes: + engine (str): + Required. The full name of the Gen App Builder engine + related to this agent if there is one. 
Format: + ``projects/{Project ID}/locations/{Location ID}/collections/{Collection ID}/engines/{Engine ID}`` + """ + + engine: str = proto.Field( + proto.STRING, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + default_language_code: str = proto.Field( + proto.STRING, + number=3, + ) + supported_language_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + avatar_uri: str = proto.Field( + proto.STRING, + number=7, + ) + speech_to_text_settings: "SpeechToTextSettings" = proto.Field( + proto.MESSAGE, + number=13, + message="SpeechToTextSettings", + ) + start_flow: str = proto.Field( + proto.STRING, + number=16, + ) + security_settings: str = proto.Field( + proto.STRING, + number=17, + ) + enable_stackdriver_logging: bool = proto.Field( + proto.BOOL, + number=18, + ) + enable_spell_correction: bool = proto.Field( + proto.BOOL, + number=20, + ) + locked: bool = proto.Field( + proto.BOOL, + number=27, + ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=22, + message=gcdc_advanced_settings.AdvancedSettings, + ) + git_integration_settings: GitIntegrationSettings = proto.Field( + proto.MESSAGE, + number=30, + message=GitIntegrationSettings, + ) + text_to_speech_settings: audio_config.TextToSpeechSettings = proto.Field( + proto.MESSAGE, + number=31, + message=audio_config.TextToSpeechSettings, + ) + gen_app_builder_settings: GenAppBuilderSettings = proto.Field( + proto.MESSAGE, + number=33, + optional=True, + message=GenAppBuilderSettings, + ) + + +class ListAgentsRequest(proto.Message): + r"""The request message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + + Attributes: + parent (str): + Required. The location to list all agents for. 
Format: + ``projects//locations/``. + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAgentsResponse(proto.Message): + r"""The response message for + [Agents.ListAgents][google.cloud.dialogflow.cx.v3.Agents.ListAgents]. + + Attributes: + agents (MutableSequence[google.cloud.dialogflowcx_v3.types.Agent]): + The list of agents. There will be a maximum number of items + returned based on the page_size field in the request. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + agents: MutableSequence["Agent"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Agent", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAgentRequest(proto.Message): + r"""The request message for + [Agents.GetAgent][google.cloud.dialogflow.cx.v3.Agents.GetAgent]. + + Attributes: + name (str): + Required. The name of the agent. Format: + ``projects//locations//agents/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateAgentRequest(proto.Message): + r"""The request message for + [Agents.CreateAgent][google.cloud.dialogflow.cx.v3.Agents.CreateAgent]. + + Attributes: + parent (str): + Required. The location to create a agent for. Format: + ``projects//locations/``. + agent (google.cloud.dialogflowcx_v3.types.Agent): + Required. The agent to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + agent: "Agent" = proto.Field( + proto.MESSAGE, + number=2, + message="Agent", + ) + + +class UpdateAgentRequest(proto.Message): + r"""The request message for + [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent]. + + Attributes: + agent (google.cloud.dialogflowcx_v3.types.Agent): + Required. The agent to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get updated. + If the mask is not present, all fields will be + updated. + """ + + agent: "Agent" = proto.Field( + proto.MESSAGE, + number=1, + message="Agent", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteAgentRequest(proto.Message): + r"""The request message for + [Agents.DeleteAgent][google.cloud.dialogflow.cx.v3.Agents.DeleteAgent]. + + Attributes: + name (str): + Required. The name of the agent to delete. Format: + ``projects//locations//agents/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportAgentRequest(proto.Message): + r"""The request message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + + Attributes: + name (str): + Required. The name of the agent to export. Format: + ``projects//locations//agents/``. + agent_uri (str): + Optional. The `Google Cloud + Storage `__ URI to + export the agent to. The format of this URI must be + ``gs:///``. If left unspecified, + the serialized agent is returned inline. + + Dialogflow performs a write operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have write permissions for the object. + For more information, see `Dialogflow access + control `__. + data_format (google.cloud.dialogflowcx_v3.types.ExportAgentRequest.DataFormat): + Optional. The data format of the exported agent. 
If not + specified, ``BLOB`` is assumed. + environment (str): + Optional. Environment name. If not set, draft environment is + assumed. Format: + ``projects//locations//agents//environments/``. + git_destination (google.cloud.dialogflowcx_v3.types.ExportAgentRequest.GitDestination): + Optional. The Git branch to export the agent + to. + include_bigquery_export_settings (bool): + Optional. Whether to include BigQuery Export + setting. + """ + + class DataFormat(proto.Enum): + r"""Data format of the exported agent. + + Values: + DATA_FORMAT_UNSPECIFIED (0): + Unspecified format. + BLOB (1): + Agent content will be exported as raw bytes. + JSON_PACKAGE (4): + Agent content will be exported in JSON + Package format. + """ + DATA_FORMAT_UNSPECIFIED = 0 + BLOB = 1 + JSON_PACKAGE = 4 + + class GitDestination(proto.Message): + r"""Settings for exporting to a git branch. + + Attributes: + tracking_branch (str): + Tracking branch for the git push. + commit_message (str): + Commit message for the git push. + """ + + tracking_branch: str = proto.Field( + proto.STRING, + number=1, + ) + commit_message: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + agent_uri: str = proto.Field( + proto.STRING, + number=2, + ) + data_format: DataFormat = proto.Field( + proto.ENUM, + number=3, + enum=DataFormat, + ) + environment: str = proto.Field( + proto.STRING, + number=5, + ) + git_destination: GitDestination = proto.Field( + proto.MESSAGE, + number=6, + message=GitDestination, + ) + include_bigquery_export_settings: bool = proto.Field( + proto.BOOL, + number=7, + ) + + +class ExportAgentResponse(proto.Message): + r"""The response message for + [Agents.ExportAgent][google.cloud.dialogflow.cx.v3.Agents.ExportAgent]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. 
+ + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + agent_uri (str): + The URI to a file containing the exported agent. This field + is populated if ``agent_uri`` is specified in + [ExportAgentRequest][google.cloud.dialogflow.cx.v3.ExportAgentRequest]. + + This field is a member of `oneof`_ ``agent``. + agent_content (bytes): + Uncompressed raw byte content for agent. This field is + populated if none of ``agent_uri`` and ``git_destination`` + are specified in + [ExportAgentRequest][google.cloud.dialogflow.cx.v3.ExportAgentRequest]. + + This field is a member of `oneof`_ ``agent``. + commit_sha (str): + Commit SHA of the git push. This field is populated if + ``git_destination`` is specified in + [ExportAgentRequest][google.cloud.dialogflow.cx.v3.ExportAgentRequest]. + + This field is a member of `oneof`_ ``agent``. + """ + + agent_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="agent", + ) + agent_content: bytes = proto.Field( + proto.BYTES, + number=2, + oneof="agent", + ) + commit_sha: str = proto.Field( + proto.STRING, + number=3, + oneof="agent", + ) + + +class RestoreAgentRequest(proto.Message): + r"""The request message for + [Agents.RestoreAgent][google.cloud.dialogflow.cx.v3.Agents.RestoreAgent]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the agent to restore into. Format: + ``projects//locations//agents/``. + agent_uri (str): + The `Google Cloud + Storage `__ URI to + restore agent from. The format of this URI must be + ``gs:///``. 
+ + Dialogflow performs a read operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have read permissions for the object. + For more information, see `Dialogflow access + control `__. + + This field is a member of `oneof`_ ``agent``. + agent_content (bytes): + Uncompressed raw byte content for agent. + + This field is a member of `oneof`_ ``agent``. + git_source (google.cloud.dialogflowcx_v3.types.RestoreAgentRequest.GitSource): + Setting for restoring from a git branch + + This field is a member of `oneof`_ ``agent``. + restore_option (google.cloud.dialogflowcx_v3.types.RestoreAgentRequest.RestoreOption): + Agent restore mode. If not specified, ``KEEP`` is assumed. + """ + + class RestoreOption(proto.Enum): + r"""Restore option. + + Values: + RESTORE_OPTION_UNSPECIFIED (0): + Unspecified. Treated as KEEP. + KEEP (1): + Always respect the settings from the exported + agent file. It may cause a restoration failure + if some settings (e.g. model type) are not + supported in the target agent. + FALLBACK (2): + Fallback to default settings if some settings + are not supported in the target agent. 
+ """ + RESTORE_OPTION_UNSPECIFIED = 0 + KEEP = 1 + FALLBACK = 2 + + class GitSource(proto.Message): + r"""Settings for restoring from a git branch + + Attributes: + tracking_branch (str): + tracking branch for the git pull + """ + + tracking_branch: str = proto.Field( + proto.STRING, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + agent_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="agent", + ) + agent_content: bytes = proto.Field( + proto.BYTES, + number=3, + oneof="agent", + ) + git_source: GitSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="agent", + message=GitSource, + ) + restore_option: RestoreOption = proto.Field( + proto.ENUM, + number=5, + enum=RestoreOption, + ) + + +class ValidateAgentRequest(proto.Message): + r"""The request message for + [Agents.ValidateAgent][google.cloud.dialogflow.cx.v3.Agents.ValidateAgent]. + + Attributes: + name (str): + Required. The agent to validate. Format: + ``projects//locations//agents/``. + language_code (str): + If not specified, the agent's default + language is used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetAgentValidationResultRequest(proto.Message): + r"""The request message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. + + Attributes: + name (str): + Required. The agent name. Format: + ``projects//locations//agents//validationResult``. + language_code (str): + If not specified, the agent's default + language is used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AgentValidationResult(proto.Message): + r"""The response message for + [Agents.GetAgentValidationResult][google.cloud.dialogflow.cx.v3.Agents.GetAgentValidationResult]. 
+ + Attributes: + name (str): + The unique identifier of the agent validation result. + Format: + ``projects//locations//agents//validationResult``. + flow_validation_results (MutableSequence[google.cloud.dialogflowcx_v3.types.FlowValidationResult]): + Contains all flow validation results. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + flow_validation_results: MutableSequence[ + flow.FlowValidationResult + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=flow.FlowValidationResult, + ) + + +class GetGenerativeSettingsRequest(proto.Message): + r"""Request for + [GetGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.GetGenerativeSettings] + RPC. + + Attributes: + name (str): + Required. Format: + ``projects//locations//agents//generativeSettings``. + language_code (str): + Required. Language code of the generative + settings. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateGenerativeSettingsRequest(proto.Message): + r"""Request for + [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.UpdateGenerativeSettings] + RPC. + + Attributes: + generative_settings (google.cloud.dialogflowcx_v3.types.GenerativeSettings): + Required. Generative settings to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The mask to control which fields + get updated. If the mask is not present, all + fields will be updated. 
+ """ + + generative_settings: gcdc_generative_settings.GenerativeSettings = proto.Field( + proto.MESSAGE, + number=1, + message=gcdc_generative_settings.GenerativeSettings, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py new file mode 100644 index 000000000000..7120476eedac --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/audio_config.py @@ -0,0 +1,518 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "AudioEncoding", + "SpeechModelVariant", + "SsmlVoiceGender", + "OutputAudioEncoding", + "SpeechWordInfo", + "InputAudioConfig", + "VoiceSelectionParams", + "SynthesizeSpeechConfig", + "OutputAudioConfig", + "TextToSpeechSettings", + }, +) + + +class AudioEncoding(proto.Enum): + r"""Audio encoding of the audio content sent in the conversational query + request. 
Refer to the `Cloud Speech API + documentation `__ + for more details. + + Values: + AUDIO_ENCODING_UNSPECIFIED (0): + Not specified. + AUDIO_ENCODING_LINEAR_16 (1): + Uncompressed 16-bit signed little-endian + samples (Linear PCM). + AUDIO_ENCODING_FLAC (2): + ```FLAC`` `__ + (Free Lossless Audio Codec) is the recommended encoding + because it is lossless (therefore recognition is not + compromised) and requires only about half the bandwidth of + ``LINEAR16``. ``FLAC`` stream encoding supports 16-bit and + 24-bit samples, however, not all fields in ``STREAMINFO`` + are supported. + AUDIO_ENCODING_MULAW (3): + 8-bit samples that compand 14-bit audio + samples using G.711 PCMU/mu-law. + AUDIO_ENCODING_AMR (4): + Adaptive Multi-Rate Narrowband codec. ``sample_rate_hertz`` + must be 8000. + AUDIO_ENCODING_AMR_WB (5): + Adaptive Multi-Rate Wideband codec. ``sample_rate_hertz`` + must be 16000. + AUDIO_ENCODING_OGG_OPUS (6): + Opus encoded audio frames in Ogg container + (`OggOpus `__). + ``sample_rate_hertz`` must be 16000. + AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE (7): + Although the use of lossy encodings is not recommended, if a + very low bitrate encoding is required, ``OGG_OPUS`` is + highly preferred over Speex encoding. The + `Speex `__ encoding supported by + Dialogflow API has a header byte in each block, as in MIME + type ``audio/x-speex-with-header-byte``. It is a variant of + the RTP Speex encoding defined in `RFC + 5574 `__. The stream is + a sequence of blocks, one block per RTP packet. Each block + starts with a byte containing the length of the block, in + bytes, followed by one or more frames of Speex data, padded + to an integral number of bytes (octets) as specified in RFC + 5574. In other words, each RTP header is replaced with a + single byte containing the block length. Only Speex wideband + is supported. ``sample_rate_hertz`` must be 16000. 
+ """ + AUDIO_ENCODING_UNSPECIFIED = 0 + AUDIO_ENCODING_LINEAR_16 = 1 + AUDIO_ENCODING_FLAC = 2 + AUDIO_ENCODING_MULAW = 3 + AUDIO_ENCODING_AMR = 4 + AUDIO_ENCODING_AMR_WB = 5 + AUDIO_ENCODING_OGG_OPUS = 6 + AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE = 7 + + +class SpeechModelVariant(proto.Enum): + r"""Variant of the specified [Speech + model][google.cloud.dialogflow.cx.v3.InputAudioConfig.model] to use. + + See the `Cloud Speech + documentation `__ + for which models have different variants. For example, the + "phone_call" model has both a standard and an enhanced variant. When + you use an enhanced model, you will generally receive higher quality + results than for a standard model. + + Values: + SPEECH_MODEL_VARIANT_UNSPECIFIED (0): + No model variant specified. In this case Dialogflow defaults + to USE_BEST_AVAILABLE. + USE_BEST_AVAILABLE (1): + Use the best available variant of the [Speech + model][InputAudioConfig.model] that the caller is eligible + for. + + Please see the `Dialogflow + docs `__ + for how to make your project eligible for enhanced models. + USE_STANDARD (2): + Use standard model variant even if an enhanced model is + available. See the `Cloud Speech + documentation `__ + for details about enhanced models. + USE_ENHANCED (3): + Use an enhanced model variant: + + - If an enhanced variant does not exist for the given + [model][google.cloud.dialogflow.cx.v3.InputAudioConfig.model] + and request language, Dialogflow falls back to the + standard variant. + + The `Cloud Speech + documentation `__ + describes which models have enhanced variants. + + - If the API caller isn't eligible for enhanced models, + Dialogflow returns an error. Please see the `Dialogflow + docs `__ + for how to make your project eligible. + """ + SPEECH_MODEL_VARIANT_UNSPECIFIED = 0 + USE_BEST_AVAILABLE = 1 + USE_STANDARD = 2 + USE_ENHANCED = 3 + + +class SsmlVoiceGender(proto.Enum): + r"""Gender of the voice as described in `SSML voice + element `__. 
+ + Values: + SSML_VOICE_GENDER_UNSPECIFIED (0): + An unspecified gender, which means that the + client doesn't care which gender the selected + voice will have. + SSML_VOICE_GENDER_MALE (1): + A male voice. + SSML_VOICE_GENDER_FEMALE (2): + A female voice. + SSML_VOICE_GENDER_NEUTRAL (3): + A gender-neutral voice. + """ + SSML_VOICE_GENDER_UNSPECIFIED = 0 + SSML_VOICE_GENDER_MALE = 1 + SSML_VOICE_GENDER_FEMALE = 2 + SSML_VOICE_GENDER_NEUTRAL = 3 + + +class OutputAudioEncoding(proto.Enum): + r"""Audio encoding of the output audio format in Text-To-Speech. + + Values: + OUTPUT_AUDIO_ENCODING_UNSPECIFIED (0): + Not specified. + OUTPUT_AUDIO_ENCODING_LINEAR_16 (1): + Uncompressed 16-bit signed little-endian + samples (Linear PCM). Audio content returned as + LINEAR16 also contains a WAV header. + OUTPUT_AUDIO_ENCODING_MP3 (2): + MP3 audio at 32kbps. + OUTPUT_AUDIO_ENCODING_MP3_64_KBPS (4): + MP3 audio at 64kbps. + OUTPUT_AUDIO_ENCODING_OGG_OPUS (3): + Opus encoded audio wrapped in an ogg + container. The result will be a file which can + be played natively on Android, and in browsers + (at least Chrome and Firefox). The quality of + the encoding is considerably higher than MP3 + while using approximately the same bitrate. + OUTPUT_AUDIO_ENCODING_MULAW (5): + 8-bit samples that compand 14-bit audio + samples using G.711 PCMU/mu-law. + """ + OUTPUT_AUDIO_ENCODING_UNSPECIFIED = 0 + OUTPUT_AUDIO_ENCODING_LINEAR_16 = 1 + OUTPUT_AUDIO_ENCODING_MP3 = 2 + OUTPUT_AUDIO_ENCODING_MP3_64_KBPS = 4 + OUTPUT_AUDIO_ENCODING_OGG_OPUS = 3 + OUTPUT_AUDIO_ENCODING_MULAW = 5 + + +class SpeechWordInfo(proto.Message): + r"""Information for a word recognized by the speech recognizer. + + Attributes: + word (str): + The word this info is for. + start_offset (google.protobuf.duration_pb2.Duration): + Time offset relative to the beginning of the + audio that corresponds to the start of the + spoken word. This is an experimental feature and + the accuracy of the time offset can vary. 
+ end_offset (google.protobuf.duration_pb2.Duration): + Time offset relative to the beginning of the + audio that corresponds to the end of the spoken + word. This is an experimental feature and the + accuracy of the time offset can vary. + confidence (float): + The Speech confidence between 0.0 and 1.0 for + this word. A higher number indicates an + estimated greater likelihood that the recognized + word is correct. The default of 0.0 is a + sentinel value indicating that confidence was + not set. + + This field is not guaranteed to be fully stable + over time for the same audio input. Users should + also not rely on it to always be provided. + """ + + word: str = proto.Field( + proto.STRING, + number=3, + ) + start_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class InputAudioConfig(proto.Message): + r"""Instructs the speech recognizer on how to process the audio + content. + + Attributes: + audio_encoding (google.cloud.dialogflowcx_v3.types.AudioEncoding): + Required. Audio encoding of the audio content + to process. + sample_rate_hertz (int): + Sample rate (in Hertz) of the audio content sent in the + query. Refer to `Cloud Speech API + documentation `__ + for more details. + enable_word_info (bool): + Optional. If ``true``, Dialogflow returns + [SpeechWordInfo][google.cloud.dialogflow.cx.v3.SpeechWordInfo] + in + [StreamingRecognitionResult][google.cloud.dialogflow.cx.v3.StreamingRecognitionResult] + with information about the recognized speech words, e.g. + start and end time offsets. If false or unspecified, Speech + doesn't return any word-level information. + phrase_hints (MutableSequence[str]): + Optional. 
A list of strings containing words and phrases + that the speech recognizer should recognize with higher + likelihood. + + See `the Cloud Speech + documentation `__ + for more details. + model (str): + Optional. Which Speech model to select for the given + request. Select the model best suited to your domain to get + best results. If a model is not explicitly specified, then + we auto-select a model based on the parameters in the + InputAudioConfig. If enhanced speech model is enabled for + the agent and an enhanced version of the specified model for + the language does not exist, then the speech is recognized + using the standard version of the specified model. Refer to + `Cloud Speech API + documentation `__ + for more details. If you specify a model, the following + models typically have the best performance: + + - phone_call (best for Agent Assist and telephony) + - latest_short (best for Dialogflow non-telephony) + - command_and_search (best for very short utterances and + commands) + model_variant (google.cloud.dialogflowcx_v3.types.SpeechModelVariant): + Optional. Which variant of the [Speech + model][google.cloud.dialogflow.cx.v3.InputAudioConfig.model] + to use. + single_utterance (bool): + Optional. If ``false`` (default), recognition does not cease + until the client closes the stream. If ``true``, the + recognizer will detect a single spoken utterance in input + audio. Recognition ceases when it detects the audio's voice + has stopped or paused. In this case, once a detected intent + is received, the client should close the stream and start a + new request with a new stream as needed. Note: This setting + is relevant only for streaming methods. 
+ """ + + audio_encoding: "AudioEncoding" = proto.Field( + proto.ENUM, + number=1, + enum="AudioEncoding", + ) + sample_rate_hertz: int = proto.Field( + proto.INT32, + number=2, + ) + enable_word_info: bool = proto.Field( + proto.BOOL, + number=13, + ) + phrase_hints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + model: str = proto.Field( + proto.STRING, + number=7, + ) + model_variant: "SpeechModelVariant" = proto.Field( + proto.ENUM, + number=10, + enum="SpeechModelVariant", + ) + single_utterance: bool = proto.Field( + proto.BOOL, + number=8, + ) + + +class VoiceSelectionParams(proto.Message): + r"""Description of which voice to use for speech synthesis. + + Attributes: + name (str): + Optional. The name of the voice. If not set, the service + will choose a voice based on the other parameters such as + language_code and + [ssml_gender][google.cloud.dialogflow.cx.v3.VoiceSelectionParams.ssml_gender]. + + For the list of available voices, please refer to `Supported + voices and + languages `__. + ssml_gender (google.cloud.dialogflowcx_v3.types.SsmlVoiceGender): + Optional. The preferred gender of the voice. If not set, the + service will choose a voice based on the other parameters + such as language_code and + [name][google.cloud.dialogflow.cx.v3.VoiceSelectionParams.name]. + Note that this is only a preference, not requirement. If a + voice of the appropriate gender is not available, the + synthesizer substitutes a voice with a different gender + rather than failing the request. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ssml_gender: "SsmlVoiceGender" = proto.Field( + proto.ENUM, + number=2, + enum="SsmlVoiceGender", + ) + + +class SynthesizeSpeechConfig(proto.Message): + r"""Configuration of how speech should be synthesized. + + Attributes: + speaking_rate (float): + Optional. Speaking rate/speed, in the range [0.25, 4.0]. 1.0 + is the normal native speed supported by the specific voice. 
+ 2.0 is twice as fast, and 0.5 is half as fast. If + unset(0.0), defaults to the native 1.0 speed. Any other + values < 0.25 or > 4.0 will return an error. + pitch (float): + Optional. Speaking pitch, in the range [-20.0, 20.0]. 20 + means increase 20 semitones from the original pitch. -20 + means decrease 20 semitones from the original pitch. + volume_gain_db (float): + Optional. Volume gain (in dB) of the normal native volume + supported by the specific voice, in the range [-96.0, 16.0]. + If unset, or set to a value of 0.0 (dB), will play at normal + native signal amplitude. A value of -6.0 (dB) will play at + approximately half the amplitude of the normal native signal + amplitude. A value of +6.0 (dB) will play at approximately + twice the amplitude of the normal native signal amplitude. + We strongly recommend not to exceed +10 (dB) as there's + usually no effective increase in loudness for any value + greater than that. + effects_profile_id (MutableSequence[str]): + Optional. An identifier which selects 'audio + effects' profiles that are applied on (post + synthesized) text to speech. Effects are applied + on top of each other in the order they are + given. + voice (google.cloud.dialogflowcx_v3.types.VoiceSelectionParams): + Optional. The desired voice of the + synthesized audio. + """ + + speaking_rate: float = proto.Field( + proto.DOUBLE, + number=1, + ) + pitch: float = proto.Field( + proto.DOUBLE, + number=2, + ) + volume_gain_db: float = proto.Field( + proto.DOUBLE, + number=3, + ) + effects_profile_id: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + voice: "VoiceSelectionParams" = proto.Field( + proto.MESSAGE, + number=4, + message="VoiceSelectionParams", + ) + + +class OutputAudioConfig(proto.Message): + r"""Instructs the speech synthesizer how to generate the output + audio content. + + Attributes: + audio_encoding (google.cloud.dialogflowcx_v3.types.OutputAudioEncoding): + Required. 
Audio encoding of the synthesized + audio content. + sample_rate_hertz (int): + Optional. The synthesis sample rate (in + hertz) for this audio. If not provided, then the + synthesizer will use the default sample rate + based on the audio encoding. If this is + different from the voice's natural sample rate, + then the synthesizer will honor this request by + converting to the desired sample rate (which + might result in worse audio quality). + synthesize_speech_config (google.cloud.dialogflowcx_v3.types.SynthesizeSpeechConfig): + Optional. Configuration of how speech should be synthesized. + If not specified, + [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings] + is applied. + """ + + audio_encoding: "OutputAudioEncoding" = proto.Field( + proto.ENUM, + number=1, + enum="OutputAudioEncoding", + ) + sample_rate_hertz: int = proto.Field( + proto.INT32, + number=2, + ) + synthesize_speech_config: "SynthesizeSpeechConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="SynthesizeSpeechConfig", + ) + + +class TextToSpeechSettings(proto.Message): + r"""Settings related to speech synthesizing. + + Attributes: + synthesize_speech_configs (MutableMapping[str, google.cloud.dialogflowcx_v3.types.SynthesizeSpeechConfig]): + Configuration of how speech should be synthesized, mapping + from language + (https://cloud.google.com/dialogflow/cx/docs/reference/language) + to SynthesizeSpeechConfig. + + These settings affect: + + - The `phone + gateway `__ + synthesize configuration set via + [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings]. + + - How speech is synthesized when invoking + [session][google.cloud.dialogflow.cx.v3.Sessions] APIs. + [Agent.text_to_speech_settings][google.cloud.dialogflow.cx.v3.Agent.text_to_speech_settings] + only applies if + [OutputAudioConfig.synthesize_speech_config][google.cloud.dialogflow.cx.v3.OutputAudioConfig.synthesize_speech_config] + is not specified. 
+ """ + + synthesize_speech_configs: MutableMapping[ + str, "SynthesizeSpeechConfig" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="SynthesizeSpeechConfig", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py new file mode 100644 index 000000000000..2de2e71e7f7f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/changelog.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "ListChangelogsRequest", + "ListChangelogsResponse", + "GetChangelogRequest", + "Changelog", + }, +) + + +class ListChangelogsRequest(proto.Message): + r"""The request message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + + Attributes: + parent (str): + Required. The agent containing the changelogs. Format: + ``projects//locations//agents/``. + filter (str): + The filter string. Supports filter by user_email, resource, + type and create_time. Some examples: + + 1. 
By user email: user_email = "someone@google.com" + 2. By resource name: resource = + "projects/123/locations/global/agents/456/flows/789" + 3. By resource display name: display_name = "my agent" + 4. By action: action = "Create" + 5. By type: type = "flows" + 6. By create time. Currently predicates on ``create_time`` + and ``create_time_epoch_seconds`` are supported: + create_time_epoch_seconds > 1551790877 AND create_time <= + 2017-01-15T01:30:15.01Z + 7. Combination of above filters: resource = + "projects/123/locations/global/agents/456/flows/789" AND + user_email = "someone@google.com" AND create_time <= + 2017-01-15T01:30:15.01Z + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListChangelogsResponse(proto.Message): + r"""The response message for + [Changelogs.ListChangelogs][google.cloud.dialogflow.cx.v3.Changelogs.ListChangelogs]. + + Attributes: + changelogs (MutableSequence[google.cloud.dialogflowcx_v3.types.Changelog]): + The list of changelogs. There will be a maximum number of + items returned based on the page_size field in the request. + The changelogs will be ordered by timestamp. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
+ """ + + @property + def raw_page(self): + return self + + changelogs: MutableSequence["Changelog"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Changelog", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetChangelogRequest(proto.Message): + r"""The request message for + [Changelogs.GetChangelog][google.cloud.dialogflow.cx.v3.Changelogs.GetChangelog]. + + Attributes: + name (str): + Required. The name of the changelog to get. Format: + ``projects//locations//agents//changelogs/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Changelog(proto.Message): + r"""Changelogs represents a change made to a given agent. + + Attributes: + name (str): + The unique identifier of the changelog. Format: + ``projects//locations//agents//changelogs/``. + user_email (str): + Email address of the authenticated user. + display_name (str): + The affected resource display name of the + change. + action (str): + The action of the change. + type_ (str): + The affected resource type. + resource (str): + The affected resource name of the change. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the change. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_email: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=7, + ) + action: str = proto.Field( + proto.STRING, + number=11, + ) + type_: str = proto.Field( + proto.STRING, + number=8, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/data_store_connection.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/data_store_connection.py new file mode 100644 index 000000000000..5dc9c9f33e29 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/data_store_connection.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "DataStoreType", + "DataStoreConnection", + }, +) + + +class DataStoreType(proto.Enum): + r"""Type of a data store. + Determines how search is performed in the data store. 
+ + Values: + DATA_STORE_TYPE_UNSPECIFIED (0): + Not specified. This value indicates that the + data store type is not specified, so it will not + be used during search. + PUBLIC_WEB (1): + A data store that contains public web + content. + UNSTRUCTURED (2): + A data store that contains unstructured + private data. + STRUCTURED (3): + A data store that contains structured data + (for example FAQ). + """ + DATA_STORE_TYPE_UNSPECIFIED = 0 + PUBLIC_WEB = 1 + UNSTRUCTURED = 2 + STRUCTURED = 3 + + +class DataStoreConnection(proto.Message): + r"""A data store connection. It represents a data store in + Discovery Engine and the type of the contents it contains. + + Attributes: + data_store_type (google.cloud.dialogflowcx_v3.types.DataStoreType): + The type of the connected data store. + data_store (str): + The full name of the referenced data store. Formats: + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`` + ``projects/{project}/locations/{location}/dataStores/{data_store}`` + """ + + data_store_type: "DataStoreType" = proto.Field( + proto.ENUM, + number=1, + enum="DataStoreType", + ) + data_store: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py new file mode 100644 index 000000000000..0f814bb3fa5a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/deployment.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "Deployment", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "GetDeploymentRequest", + }, +) + + +class Deployment(proto.Message): + r"""Represents a deployment in an environment. A deployment + happens when a flow version configured to be active in the + environment. You can configure running pre-deployment steps, + e.g. running validation test cases, experiment auto-rollout, + etc. + + Attributes: + name (str): + The name of the deployment. + Format: projects//locations//agents//environments//deployments/. + flow_version (str): + The name of the flow version for this + deployment. Format: projects//locations//agents//flows//versions/. + state (google.cloud.dialogflowcx_v3.types.Deployment.State): + The current state of the deployment. + result (google.cloud.dialogflowcx_v3.types.Deployment.Result): + Result of the deployment. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of this deployment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of this deployment. + """ + + class State(proto.Enum): + r"""The state of the deployment. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + RUNNING (1): + The deployment is running. + SUCCEEDED (2): + The deployment succeeded. + FAILED (3): + The deployment failed. 
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + + class Result(proto.Message): + r"""Result of the deployment. + + Attributes: + deployment_test_results (MutableSequence[str]): + Results of test cases running before the deployment. Format: + ``projects//locations//agents//testCases//results/``. + experiment (str): + The name of the experiment triggered by this + deployment. Format: projects//locations//agents//environments//experiments/. + """ + + deployment_test_results: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + experiment: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + flow_version: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=4, + message=Result, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +class ListDeploymentsRequest(proto.Message): + r"""The request message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + + Attributes: + parent (str): + Required. The + [Environment][google.cloud.dialogflow.cx.v3.Environment] to + list all environments for. Format: + ``projects//locations//agents//environments/``. + page_size (int): + The maximum number of items to return in a + single page. By default 20 and at most 100. + page_token (str): + The next_page_token value returned from a previous list + request. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDeploymentsResponse(proto.Message): + r"""The response message for + [Deployments.ListDeployments][google.cloud.dialogflow.cx.v3.Deployments.ListDeployments]. + + Attributes: + deployments (MutableSequence[google.cloud.dialogflowcx_v3.types.Deployment]): + The list of deployments. There will be a maximum number of + items returned based on the page_size field in the request. + The list may in some cases be empty or contain fewer entries + than page_size even if this isn't the last page. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + deployments: MutableSequence["Deployment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Deployment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetDeploymentRequest(proto.Message): + r"""The request message for + [Deployments.GetDeployment][google.cloud.dialogflow.cx.v3.Deployments.GetDeployment]. + + Attributes: + name (str): + Required. The name of the + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + Format: + ``projects//locations//agents//environments//deployments/``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py new file mode 100644 index 000000000000..bfbe1a2f6d17 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/entity_type.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "EntityType", + "ListEntityTypesRequest", + "ListEntityTypesResponse", + "GetEntityTypeRequest", + "CreateEntityTypeRequest", + "UpdateEntityTypeRequest", + "DeleteEntityTypeRequest", + }, +) + + +class EntityType(proto.Message): + r"""Entities are extracted from user input and represent parameters that + are meaningful to your application. For example, a date range, a + proper name such as a geographic location or landmark, and so on. + Entities represent actionable data for your application. + + When you define an entity, you can also include synonyms that all + map to that entity. 
For example, "soft drink", "soda", "pop", and so + on. + + There are three types of entities: + + - **System** - entities that are defined by the Dialogflow API for + common data types such as date, time, currency, and so on. A + system entity is represented by the ``EntityType`` type. + + - **Custom** - entities that are defined by you that represent + actionable data that is meaningful to your application. For + example, you could define a ``pizza.sauce`` entity for red or + white pizza sauce, a ``pizza.cheese`` entity for the different + types of cheese on a pizza, a ``pizza.topping`` entity for + different toppings, and so on. A custom entity is represented by + the ``EntityType`` type. + + - **User** - entities that are built for an individual user such as + favorites, preferences, playlists, and so on. A user entity is + represented by the + [SessionEntityType][google.cloud.dialogflow.cx.v3.SessionEntityType] + type. + + For more information about entity types, see the `Dialogflow + documentation `__. + + Attributes: + name (str): + The unique identifier of the entity type. Required for + [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.UpdateEntityType]. + Format: + ``projects//locations//agents//entityTypes/``. + display_name (str): + Required. The human-readable name of the + entity type, unique within the agent. + kind (google.cloud.dialogflowcx_v3.types.EntityType.Kind): + Required. Indicates the kind of entity type. + auto_expansion_mode (google.cloud.dialogflowcx_v3.types.EntityType.AutoExpansionMode): + Indicates whether the entity type can be + automatically expanded. + entities (MutableSequence[google.cloud.dialogflowcx_v3.types.EntityType.Entity]): + The collection of entity entries associated + with the entity type. + excluded_phrases (MutableSequence[google.cloud.dialogflowcx_v3.types.EntityType.ExcludedPhrase]): + Collection of exceptional words and phrases that shouldn't + be matched. 
For example, if you have a size entity type with + entry ``giant``\ (an adjective), you might consider adding + ``giants``\ (a noun) as an exclusion. If the kind of entity + type is ``KIND_MAP``, then the phrases specified by entities + and excluded phrases should be mutually exclusive. + enable_fuzzy_extraction (bool): + Enables fuzzy entity extraction during + classification. + redact (bool): + Indicates whether parameters of the entity + type should be redacted in log. If redaction is + enabled, page parameters and intent parameters + referring to the entity type will be replaced by + parameter name when logging. + """ + + class Kind(proto.Enum): + r"""Represents kinds of entities. + + Values: + KIND_UNSPECIFIED (0): + Not specified. This value should be never + used. + KIND_MAP (1): + Map entity types allow mapping of a group of + synonyms to a canonical value. + KIND_LIST (2): + List entity types contain a set of entries + that do not map to canonical values. However, + list entity types can contain references to + other entity types (with or without aliases). + KIND_REGEXP (3): + Regexp entity types allow to specify regular + expressions in entries values. + """ + KIND_UNSPECIFIED = 0 + KIND_MAP = 1 + KIND_LIST = 2 + KIND_REGEXP = 3 + + class AutoExpansionMode(proto.Enum): + r"""Represents different entity type expansion modes. Automated + expansion allows an agent to recognize values that have not been + explicitly listed in the entity (for example, new kinds of + shopping list items). + + Values: + AUTO_EXPANSION_MODE_UNSPECIFIED (0): + Auto expansion disabled for the entity. + AUTO_EXPANSION_MODE_DEFAULT (1): + Allows an agent to recognize values that have + not been explicitly listed in the entity. + """ + AUTO_EXPANSION_MODE_UNSPECIFIED = 0 + AUTO_EXPANSION_MODE_DEFAULT = 1 + + class Entity(proto.Message): + r"""An **entity entry** for an associated entity type. + + Attributes: + value (str): + Required. 
The primary value associated with this entity + entry. For example, if the entity type is *vegetable*, the + value could be *scallions*. + + For ``KIND_MAP`` entity types: + + - A canonical value to be used in place of synonyms. + + For ``KIND_LIST`` entity types: + + - A string that can contain references to other entity + types (with or without aliases). + synonyms (MutableSequence[str]): + Required. A collection of value synonyms. For example, if + the entity type is *vegetable*, and ``value`` is + *scallions*, a synonym could be *green onions*. + + For ``KIND_LIST`` entity types: + + - This collection must contain exactly one synonym equal to + ``value``. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + synonyms: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class ExcludedPhrase(proto.Message): + r"""An excluded entity phrase that should not be matched. + + Attributes: + value (str): + Required. The word or phrase to be excluded. + """ + + value: str = proto.Field( + proto.STRING, + number=1, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + kind: Kind = proto.Field( + proto.ENUM, + number=3, + enum=Kind, + ) + auto_expansion_mode: AutoExpansionMode = proto.Field( + proto.ENUM, + number=4, + enum=AutoExpansionMode, + ) + entities: MutableSequence[Entity] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Entity, + ) + excluded_phrases: MutableSequence[ExcludedPhrase] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=ExcludedPhrase, + ) + enable_fuzzy_extraction: bool = proto.Field( + proto.BOOL, + number=7, + ) + redact: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +class ListEntityTypesRequest(proto.Message): + r"""The request message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + + Attributes: + parent (str): + Required. 
The agent to list all entity types for. Format: + ``projects//locations//agents/``. + language_code (str): + The language to list entity types for. The following fields + are language dependent: + + - ``EntityType.entities.value`` + - ``EntityType.entities.synonyms`` + - ``EntityType.excluded_phrases.value`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListEntityTypesResponse(proto.Message): + r"""The response message for + [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. + + Attributes: + entity_types (MutableSequence[google.cloud.dialogflowcx_v3.types.EntityType]): + The list of entity types. There will be a maximum number of + items returned based on the page_size field in the request. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + entity_types: MutableSequence["EntityType"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="EntityType", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetEntityTypeRequest(proto.Message): + r"""The request message for + [EntityTypes.GetEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.GetEntityType]. + + Attributes: + name (str): + Required. The name of the entity type. 
Format: + ``projects//locations//agents//entityTypes/``. + language_code (str): + The language to retrieve the entity type for. The following + fields are language dependent: + + - ``EntityType.entities.value`` + - ``EntityType.entities.synonyms`` + - ``EntityType.excluded_phrases.value`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateEntityTypeRequest(proto.Message): + r"""The request message for + [EntityTypes.CreateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.CreateEntityType]. + + Attributes: + parent (str): + Required. The agent to create a entity type for. Format: + ``projects//locations//agents/``. + entity_type (google.cloud.dialogflowcx_v3.types.EntityType): + Required. The entity type to create. + language_code (str): + The language of the following fields in ``entity_type``: + + - ``EntityType.entities.value`` + - ``EntityType.entities.synonyms`` + - ``EntityType.excluded_phrases.value`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entity_type: "EntityType" = proto.Field( + proto.MESSAGE, + number=2, + message="EntityType", + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateEntityTypeRequest(proto.Message): + r"""The request message for + [EntityTypes.UpdateEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.UpdateEntityType]. + + Attributes: + entity_type (google.cloud.dialogflowcx_v3.types.EntityType): + Required. The entity type to update. 
+ language_code (str): + The language of the following fields in ``entity_type``: + + - ``EntityType.entities.value`` + - ``EntityType.entities.synonyms`` + - ``EntityType.excluded_phrases.value`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get updated. + """ + + entity_type: "EntityType" = proto.Field( + proto.MESSAGE, + number=1, + message="EntityType", + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEntityTypeRequest(proto.Message): + r"""The request message for + [EntityTypes.DeleteEntityType][google.cloud.dialogflow.cx.v3.EntityTypes.DeleteEntityType]. + + Attributes: + name (str): + Required. The name of the entity type to delete. Format: + ``projects//locations//agents//entityTypes/``. + force (bool): + This field has no effect for entity type not being used. For + entity types that are used by intents or pages: + + - If ``force`` is set to false, an error will be returned + with message indicating the referencing resources. + - If ``force`` is set to true, Dialogflow will remove the + entity type, as well as any references to the entity type + (i.e. Page + [parameter][google.cloud.dialogflow.cx.v3.Form.Parameter] + of the entity type will be changed to '@sys.any' and + intent + [parameter][google.cloud.dialogflow.cx.v3.Intent.Parameter] + of the entity type will be removed). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py new file mode 100644 index 000000000000..3f6acee9014d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/environment.py @@ -0,0 +1,628 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import test_case, webhook + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "Environment", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "GetEnvironmentRequest", + "CreateEnvironmentRequest", + "UpdateEnvironmentRequest", + "DeleteEnvironmentRequest", + "LookupEnvironmentHistoryRequest", + "LookupEnvironmentHistoryResponse", + "ContinuousTestResult", + "RunContinuousTestRequest", + "RunContinuousTestResponse", + "RunContinuousTestMetadata", + "ListContinuousTestResultsRequest", + "ListContinuousTestResultsResponse", + "DeployFlowRequest", + "DeployFlowResponse", + "DeployFlowMetadata", + }, +) + + +class Environment(proto.Message): + r"""Represents an environment for an agent. You can create + multiple versions of your agent and publish them to separate + environments. When you edit an agent, you are editing the draft + agent. At any point, you can save the draft agent as an agent + version, which is an immutable snapshot of your agent. When you + save the draft agent, it is published to the default + environment. When you create agent versions, you can publish + them to custom environments. You can create a variety of custom + environments for testing, development, production, etc. + + Attributes: + name (str): + The name of the environment. Format: + ``projects//locations//agents//environments/``. + display_name (str): + Required. The human-readable name of the + environment (unique in an agent). Limit of 64 + characters. + description (str): + The human-readable description of the + environment. The maximum length is 500 + characters. If exceeded, the request is + rejected. 
+ version_configs (MutableSequence[google.cloud.dialogflowcx_v3.types.Environment.VersionConfig]): + A list of configurations for flow versions. You should + include version configs for all flows that are reachable + from [``Start Flow``][Agent.start_flow] in the agent. + Otherwise, an error will be returned. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time of this environment. + test_cases_config (google.cloud.dialogflowcx_v3.types.Environment.TestCasesConfig): + The test cases config for continuous tests of + this environment. + webhook_config (google.cloud.dialogflowcx_v3.types.Environment.WebhookConfig): + The webhook configuration for this + environment. + """ + + class VersionConfig(proto.Message): + r"""Configuration for the version. + + Attributes: + version (str): + Required. Format: projects//locations//agents//flows//versions/. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + + class TestCasesConfig(proto.Message): + r"""The configuration for continuous tests. + + Attributes: + test_cases (MutableSequence[str]): + A list of test case names to run. They should be under the + same agent. Format of each test case name: + ``projects//locations/ /agents//testCases/`` + enable_continuous_run (bool): + Whether to run test cases in + [TestCasesConfig.test_cases][google.cloud.dialogflow.cx.v3.Environment.TestCasesConfig.test_cases] + periodically. Default false. If set to true, run once a day. + enable_predeployment_run (bool): + Whether to run test cases in + [TestCasesConfig.test_cases][google.cloud.dialogflow.cx.v3.Environment.TestCasesConfig.test_cases] + before deploying a flow version to the environment. Default + false. 
+ """ + + test_cases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + enable_continuous_run: bool = proto.Field( + proto.BOOL, + number=2, + ) + enable_predeployment_run: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class WebhookConfig(proto.Message): + r"""Configuration for webhooks. + + Attributes: + webhook_overrides (MutableSequence[google.cloud.dialogflowcx_v3.types.Webhook]): + The list of webhooks to override for the agent environment. + The webhook must exist in the agent. You can override fields + in + [``generic_web_service``][google.cloud.dialogflow.cx.v3.Webhook.generic_web_service] + and + [``service_directory``][google.cloud.dialogflow.cx.v3.Webhook.service_directory]. + """ + + webhook_overrides: MutableSequence[webhook.Webhook] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=webhook.Webhook, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + version_configs: MutableSequence[VersionConfig] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=VersionConfig, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + test_cases_config: TestCasesConfig = proto.Field( + proto.MESSAGE, + number=7, + message=TestCasesConfig, + ) + webhook_config: WebhookConfig = proto.Field( + proto.MESSAGE, + number=10, + message=WebhookConfig, + ) + + +class ListEnvironmentsRequest(proto.Message): + r"""The request message for + [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments]. + + Attributes: + parent (str): + Required. The [Agent][google.cloud.dialogflow.cx.v3.Agent] + to list all environments for. Format: + ``projects//locations//agents/``. + page_size (int): + The maximum number of items to return in a + single page. By default 20 and at most 100. 
+ page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEnvironmentsResponse(proto.Message): + r"""The response message for + [Environments.ListEnvironments][google.cloud.dialogflow.cx.v3.Environments.ListEnvironments]. + + Attributes: + environments (MutableSequence[google.cloud.dialogflowcx_v3.types.Environment]): + The list of environments. There will be a maximum number of + items returned based on the page_size field in the request. + The list may in some cases be empty or contain fewer entries + than page_size even if this isn't the last page. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + environments: MutableSequence["Environment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Environment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetEnvironmentRequest(proto.Message): + r"""The request message for + [Environments.GetEnvironment][google.cloud.dialogflow.cx.v3.Environments.GetEnvironment]. + + Attributes: + name (str): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + Format: + ``projects//locations//agents//environments/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateEnvironmentRequest(proto.Message): + r"""The request message for + [Environments.CreateEnvironment][google.cloud.dialogflow.cx.v3.Environments.CreateEnvironment]. + + Attributes: + parent (str): + Required. The [Agent][google.cloud.dialogflow.cx.v3.Agent] + to create an + [Environment][google.cloud.dialogflow.cx.v3.Environment] + for. Format: + ``projects//locations//agents/``. 
+ environment (google.cloud.dialogflowcx_v3.types.Environment): + Required. The environment to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + environment: "Environment" = proto.Field( + proto.MESSAGE, + number=2, + message="Environment", + ) + + +class UpdateEnvironmentRequest(proto.Message): + r"""The request message for + [Environments.UpdateEnvironment][google.cloud.dialogflow.cx.v3.Environments.UpdateEnvironment]. + + Attributes: + environment (google.cloud.dialogflowcx_v3.types.Environment): + Required. The environment to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to control which fields + get updated. + """ + + environment: "Environment" = proto.Field( + proto.MESSAGE, + number=1, + message="Environment", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEnvironmentRequest(proto.Message): + r"""The request message for + [Environments.DeleteEnvironment][google.cloud.dialogflow.cx.v3.Environments.DeleteEnvironment]. + + Attributes: + name (str): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment] to + delete. Format: + ``projects//locations//agents//environments/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupEnvironmentHistoryRequest(proto.Message): + r"""The request message for + [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory]. + + Attributes: + name (str): + Required. Resource name of the environment to look up the + history for. Format: + ``projects//locations//agents//environments/``. + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LookupEnvironmentHistoryResponse(proto.Message): + r"""The response message for + [Environments.LookupEnvironmentHistory][google.cloud.dialogflow.cx.v3.Environments.LookupEnvironmentHistory]. + + Attributes: + environments (MutableSequence[google.cloud.dialogflowcx_v3.types.Environment]): + Represents a list of snapshots for an environment. Time of + the snapshots is stored in + [``update_time``][google.cloud.dialogflow.cx.v3.Environment.update_time]. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + environments: MutableSequence["Environment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Environment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ContinuousTestResult(proto.Message): + r"""Represents a result from running a test case in an agent + environment. + + Attributes: + name (str): + The resource name for the continuous test result. Format: + ``projects//locations//agents//environments//continuousTestResults/``. + result (google.cloud.dialogflowcx_v3.types.ContinuousTestResult.AggregatedTestResult): + The result of this continuous test run, i.e. + whether all the tests in this continuous test + run pass or not. + test_case_results (MutableSequence[str]): + A list of individual test case results names + in this continuous test run. + run_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the continuous testing run starts. + """ + + class AggregatedTestResult(proto.Enum): + r"""The overall result for a continuous test run in an agent + environment. + + Values: + AGGREGATED_TEST_RESULT_UNSPECIFIED (0): + Not specified. Should never be used. 
+ PASSED (1): + All the tests passed. + FAILED (2): + At least one test did not pass. + """ + AGGREGATED_TEST_RESULT_UNSPECIFIED = 0 + PASSED = 1 + FAILED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + result: AggregatedTestResult = proto.Field( + proto.ENUM, + number=2, + enum=AggregatedTestResult, + ) + test_case_results: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class RunContinuousTestRequest(proto.Message): + r"""The request message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + + Attributes: + environment (str): + Required. Format: + ``projects//locations//agents//environments/``. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunContinuousTestResponse(proto.Message): + r"""The response message for + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest]. + + Attributes: + continuous_test_result (google.cloud.dialogflowcx_v3.types.ContinuousTestResult): + The result for a continuous test run. + """ + + continuous_test_result: "ContinuousTestResult" = proto.Field( + proto.MESSAGE, + number=1, + message="ContinuousTestResult", + ) + + +class RunContinuousTestMetadata(proto.Message): + r"""Metadata returned for the + [Environments.RunContinuousTest][google.cloud.dialogflow.cx.v3.Environments.RunContinuousTest] + long running operation. + + Attributes: + errors (MutableSequence[google.cloud.dialogflowcx_v3.types.TestError]): + The test errors. 
+ """ + + errors: MutableSequence[test_case.TestError] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=test_case.TestError, + ) + + +class ListContinuousTestResultsRequest(proto.Message): + r"""The request message for + [Environments.ListContinuousTestResults][google.cloud.dialogflow.cx.v3.Environments.ListContinuousTestResults]. + + Attributes: + parent (str): + Required. The environment to list results for. Format: + ``projects//locations//agents// environments/``. + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListContinuousTestResultsResponse(proto.Message): + r"""The response message for [Environments.ListTestCaseResults][]. + + Attributes: + continuous_test_results (MutableSequence[google.cloud.dialogflowcx_v3.types.ContinuousTestResult]): + The list of continuous test results. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + continuous_test_results: MutableSequence[ + "ContinuousTestResult" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ContinuousTestResult", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeployFlowRequest(proto.Message): + r"""The request message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + + Attributes: + environment (str): + Required. The environment to deploy the flow to. Format: + ``projects//locations//agents// environments/``. + flow_version (str): + Required. The flow version to deploy. 
Format: + ``projects//locations//agents// flows//versions/``. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + flow_version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeployFlowResponse(proto.Message): + r"""The response message for + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow]. + + Attributes: + environment (google.cloud.dialogflowcx_v3.types.Environment): + The updated environment where the flow is + deployed. + deployment (str): + The name of the flow version + [Deployment][google.cloud.dialogflow.cx.v3.Deployment]. + Format: + ``projects//locations//agents// environments//deployments/``. + """ + + environment: "Environment" = proto.Field( + proto.MESSAGE, + number=1, + message="Environment", + ) + deployment: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeployFlowMetadata(proto.Message): + r"""Metadata returned for the + [Environments.DeployFlow][google.cloud.dialogflow.cx.v3.Environments.DeployFlow] + long running operation. + + Attributes: + test_errors (MutableSequence[google.cloud.dialogflowcx_v3.types.TestError]): + Errors of running deployment tests. + """ + + test_errors: MutableSequence[test_case.TestError] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=test_case.TestError, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py new file mode 100644 index 000000000000..d4f7e7314a97 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/experiment.py @@ -0,0 +1,773 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "Experiment", + "VersionVariants", + "VariantsHistory", + "RolloutConfig", + "RolloutState", + "ListExperimentsRequest", + "ListExperimentsResponse", + "GetExperimentRequest", + "CreateExperimentRequest", + "UpdateExperimentRequest", + "DeleteExperimentRequest", + "StartExperimentRequest", + "StopExperimentRequest", + }, +) + + +class Experiment(proto.Message): + r"""Represents an experiment in an environment. + + Attributes: + name (str): + The name of the experiment. + Format: projects//locations//agents//environments//experiments/.. + display_name (str): + Required. The human-readable name of the + experiment (unique in an environment). Limit of + 64 characters. + description (str): + The human-readable description of the + experiment. + state (google.cloud.dialogflowcx_v3.types.Experiment.State): + The current state of the experiment. + Transition triggered by + Experiments.StartExperiment: DRAFT->RUNNING. + Transition triggered by + Experiments.CancelExperiment: DRAFT->DONE or + RUNNING->DONE. + definition (google.cloud.dialogflowcx_v3.types.Experiment.Definition): + The definition of the experiment. 
+ rollout_config (google.cloud.dialogflowcx_v3.types.RolloutConfig): + The configuration for auto rollout. If set, + there should be exactly two variants in the + experiment (control variant being the default + version of the flow), the traffic allocation for + the non-control variant will gradually increase + to 100% when conditions are met, and eventually + replace the control variant to become the + default version of the flow. + rollout_state (google.cloud.dialogflowcx_v3.types.RolloutState): + State of the auto rollout process. + rollout_failure_reason (str): + The reason why rollout has failed. Should only be set when + state is ROLLOUT_FAILED. + result (google.cloud.dialogflowcx_v3.types.Experiment.Result): + Inference result of the experiment. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Creation time of this experiment. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of this experiment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of this experiment. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Last update time of this experiment. + experiment_length (google.protobuf.duration_pb2.Duration): + Maximum number of days to run the + experiment/rollout. If auto-rollout is not + enabled, default value and maximum will be 30 + days. If auto-rollout is enabled, default value + and maximum will be 6 days. + variants_history (MutableSequence[google.cloud.dialogflowcx_v3.types.VariantsHistory]): + The history of updates to the experiment + variants. + """ + + class State(proto.Enum): + r"""The state of the experiment. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + DRAFT (1): + The experiment is created but not started + yet. + RUNNING (2): + The experiment is running. + DONE (3): + The experiment is done. + ROLLOUT_FAILED (4): + The experiment with auto-rollout enabled has + failed. 
+ """ + STATE_UNSPECIFIED = 0 + DRAFT = 1 + RUNNING = 2 + DONE = 3 + ROLLOUT_FAILED = 4 + + class Definition(proto.Message): + r"""Definition of the experiment. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + condition (str): + The condition defines which subset of sessions are selected + for this experiment. If not specified, all sessions are + eligible. E.g. "query_input.language_code=en" See the + `conditions + reference `__. + version_variants (google.cloud.dialogflowcx_v3.types.VersionVariants): + The flow versions as the variants of this + experiment. + + This field is a member of `oneof`_ ``variants``. + """ + + condition: str = proto.Field( + proto.STRING, + number=1, + ) + version_variants: "VersionVariants" = proto.Field( + proto.MESSAGE, + number=2, + oneof="variants", + message="VersionVariants", + ) + + class Result(proto.Message): + r"""The inference result which includes an objective metric to + optimize and the confidence interval. + + Attributes: + version_metrics (MutableSequence[google.cloud.dialogflowcx_v3.types.Experiment.Result.VersionMetrics]): + Version variants and metrics. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + The last time the experiment's stats data was + updated. Will have default value if stats have + never been computed for this experiment. + """ + + class MetricType(proto.Enum): + r"""Types of ratio-based metric for Dialogflow experiment. + + Values: + METRIC_UNSPECIFIED (0): + Metric unspecified. + CONTAINED_SESSION_NO_CALLBACK_RATE (1): + Percentage of contained sessions without user + calling back in 24 hours. + LIVE_AGENT_HANDOFF_RATE (2): + Percentage of sessions that were handed to a + human agent. + CALLBACK_SESSION_RATE (3): + Percentage of sessions with the same user + calling back. + ABANDONED_SESSION_RATE (4): + Percentage of sessions where user hung up. 
+ SESSION_END_RATE (5): + Percentage of sessions reached Dialogflow 'END_PAGE' or + 'END_SESSION'. + """ + METRIC_UNSPECIFIED = 0 + CONTAINED_SESSION_NO_CALLBACK_RATE = 1 + LIVE_AGENT_HANDOFF_RATE = 2 + CALLBACK_SESSION_RATE = 3 + ABANDONED_SESSION_RATE = 4 + SESSION_END_RATE = 5 + + class CountType(proto.Enum): + r"""Types of count-based metric for Dialogflow experiment. + + Values: + COUNT_TYPE_UNSPECIFIED (0): + Count type unspecified. + TOTAL_NO_MATCH_COUNT (1): + Total number of occurrences of a 'NO_MATCH'. + TOTAL_TURN_COUNT (2): + Total number of turn counts. + AVERAGE_TURN_COUNT (3): + Average turn count in a session. + """ + COUNT_TYPE_UNSPECIFIED = 0 + TOTAL_NO_MATCH_COUNT = 1 + TOTAL_TURN_COUNT = 2 + AVERAGE_TURN_COUNT = 3 + + class ConfidenceInterval(proto.Message): + r"""A confidence interval is a range of possible values for the + experiment objective you are trying to measure. + + Attributes: + confidence_level (float): + The confidence level used to construct the + interval, i.e. there is X% chance that the true + value is within this interval. + ratio (float): + The percent change between an experiment + metric's value and the value for its control. + lower_bound (float): + Lower bound of the interval. + upper_bound (float): + Upper bound of the interval. + """ + + confidence_level: float = proto.Field( + proto.DOUBLE, + number=1, + ) + ratio: float = proto.Field( + proto.DOUBLE, + number=2, + ) + lower_bound: float = proto.Field( + proto.DOUBLE, + number=3, + ) + upper_bound: float = proto.Field( + proto.DOUBLE, + number=4, + ) + + class Metric(proto.Message): + r"""Metric and corresponding confidence intervals. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dialogflowcx_v3.types.Experiment.Result.MetricType): + Ratio-based metric type. Only one of type or count_type is + specified in each Metric. + count_type (google.cloud.dialogflowcx_v3.types.Experiment.Result.CountType): + Count-based metric type. Only one of type or count_type is + specified in each Metric. + ratio (float): + Ratio value of a metric. + + This field is a member of `oneof`_ ``value``. + count (float): + Count value of a metric. + + This field is a member of `oneof`_ ``value``. + confidence_interval (google.cloud.dialogflowcx_v3.types.Experiment.Result.ConfidenceInterval): + The probability that the treatment is better + than all other treatments in the experiment + """ + + type_: "Experiment.Result.MetricType" = proto.Field( + proto.ENUM, + number=1, + enum="Experiment.Result.MetricType", + ) + count_type: "Experiment.Result.CountType" = proto.Field( + proto.ENUM, + number=5, + enum="Experiment.Result.CountType", + ) + ratio: float = proto.Field( + proto.DOUBLE, + number=2, + oneof="value", + ) + count: float = proto.Field( + proto.DOUBLE, + number=4, + oneof="value", + ) + confidence_interval: "Experiment.Result.ConfidenceInterval" = proto.Field( + proto.MESSAGE, + number=3, + message="Experiment.Result.ConfidenceInterval", + ) + + class VersionMetrics(proto.Message): + r"""Version variant and associated metrics. + + Attributes: + version (str): + The name of the flow + [Version][google.cloud.dialogflow.cx.v3.Version]. Format: + ``projects//locations//agents//flows//versions/``. + metrics (MutableSequence[google.cloud.dialogflowcx_v3.types.Experiment.Result.Metric]): + The metrics and corresponding confidence + intervals in the inference result. + session_count (int): + Number of sessions that were allocated to + this version. 
+ """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + metrics: MutableSequence["Experiment.Result.Metric"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Experiment.Result.Metric", + ) + session_count: int = proto.Field( + proto.INT32, + number=3, + ) + + version_metrics: MutableSequence[ + "Experiment.Result.VersionMetrics" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Experiment.Result.VersionMetrics", + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + definition: Definition = proto.Field( + proto.MESSAGE, + number=5, + message=Definition, + ) + rollout_config: "RolloutConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="RolloutConfig", + ) + rollout_state: "RolloutState" = proto.Field( + proto.MESSAGE, + number=15, + message="RolloutState", + ) + rollout_failure_reason: str = proto.Field( + proto.STRING, + number=16, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=6, + message=Result, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + experiment_length: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=11, + message=duration_pb2.Duration, + ) + variants_history: 
MutableSequence["VariantsHistory"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="VariantsHistory", + ) + + +class VersionVariants(proto.Message): + r"""A list of flow version variants. + + Attributes: + variants (MutableSequence[google.cloud.dialogflowcx_v3.types.VersionVariants.Variant]): + A list of flow version variants. + """ + + class Variant(proto.Message): + r"""A single flow version with specified traffic allocation. + + Attributes: + version (str): + The name of the flow version. Format: + ``projects//locations//agents//flows//versions/``. + traffic_allocation (float): + Percentage of the traffic which should be + routed to this version of flow. Traffic + allocation for a single flow must sum up to 1.0. + is_control_group (bool): + Whether the variant is for the control group. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + traffic_allocation: float = proto.Field( + proto.FLOAT, + number=2, + ) + is_control_group: bool = proto.Field( + proto.BOOL, + number=3, + ) + + variants: MutableSequence[Variant] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Variant, + ) + + +class VariantsHistory(proto.Message): + r"""The history of variants update. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + version_variants (google.cloud.dialogflowcx_v3.types.VersionVariants): + The flow versions as the variants. + + This field is a member of `oneof`_ ``variants``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Update time of the variants. + """ + + version_variants: "VersionVariants" = proto.Field( + proto.MESSAGE, + number=1, + oneof="variants", + message="VersionVariants", + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class RolloutConfig(proto.Message): + r"""The configuration for auto rollout. 
+ + Attributes: + rollout_steps (MutableSequence[google.cloud.dialogflowcx_v3.types.RolloutConfig.RolloutStep]): + Steps to roll out a flow version. Steps + should be sorted by percentage in ascending + order. + rollout_condition (str): + The conditions that are used to evaluate the success of a + rollout step. If not specified, all rollout steps will + proceed to the next one unless failure conditions are met. + E.g. "containment_rate > 60% AND callback_rate < 20%". See + the `conditions + reference `__. + failure_condition (str): + The conditions that are used to evaluate the failure of a + rollout step. If not specified, no rollout steps will fail. + E.g. "containment_rate < 10% OR average_turn_count < 3". See + the `conditions + reference `__. + """ + + class RolloutStep(proto.Message): + r"""A single rollout step with specified traffic allocation. + + Attributes: + display_name (str): + The name of the rollout step; + traffic_percent (int): + The percentage of traffic allocated to the flow version of + this rollout step. (0%, 100%]. + min_duration (google.protobuf.duration_pb2.Duration): + The minimum time that this step should last. + Should be longer than 1 hour. If not set, the + default minimum duration for each step will be 1 + hour. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + traffic_percent: int = proto.Field( + proto.INT32, + number=2, + ) + min_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + rollout_steps: MutableSequence[RolloutStep] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=RolloutStep, + ) + rollout_condition: str = proto.Field( + proto.STRING, + number=2, + ) + failure_condition: str = proto.Field( + proto.STRING, + number=3, + ) + + +class RolloutState(proto.Message): + r"""State of the auto-rollout process. + + Attributes: + step (str): + Display name of the current auto rollout + step. 
+ step_index (int): + Index of the current step in the auto rollout + steps list. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of the current step. + """ + + step: str = proto.Field( + proto.STRING, + number=1, + ) + step_index: int = proto.Field( + proto.INT32, + number=3, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListExperimentsRequest(proto.Message): + r"""The request message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + + Attributes: + parent (str): + Required. The + [Environment][google.cloud.dialogflow.cx.v3.Environment] to + list all environments for. Format: + ``projects//locations//agents//environments/``. + page_size (int): + The maximum number of items to return in a + single page. By default 20 and at most 100. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListExperimentsResponse(proto.Message): + r"""The response message for + [Experiments.ListExperiments][google.cloud.dialogflow.cx.v3.Experiments.ListExperiments]. + + Attributes: + experiments (MutableSequence[google.cloud.dialogflowcx_v3.types.Experiment]): + The list of experiments. There will be a maximum number of + items returned based on the page_size field in the request. + The list may in some cases be empty or contain fewer entries + than page_size even if this isn't the last page. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
+ """ + + @property + def raw_page(self): + return self + + experiments: MutableSequence["Experiment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Experiment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetExperimentRequest(proto.Message): + r"""The request message for + [Experiments.GetExperiment][google.cloud.dialogflow.cx.v3.Experiments.GetExperiment]. + + Attributes: + name (str): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment]. + Format: + ``projects//locations//agents//environments//experiments/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateExperimentRequest(proto.Message): + r"""The request message for + [Experiments.CreateExperiment][google.cloud.dialogflow.cx.v3.Experiments.CreateExperiment]. + + Attributes: + parent (str): + Required. The [Agent][google.cloud.dialogflow.cx.v3.Agent] + to create an + [Environment][google.cloud.dialogflow.cx.v3.Environment] + for. Format: + ``projects//locations//agents//environments/``. + experiment (google.cloud.dialogflowcx_v3.types.Experiment): + Required. The experiment to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + experiment: "Experiment" = proto.Field( + proto.MESSAGE, + number=2, + message="Experiment", + ) + + +class UpdateExperimentRequest(proto.Message): + r"""The request message for + [Experiments.UpdateExperiment][google.cloud.dialogflow.cx.v3.Experiments.UpdateExperiment]. + + Attributes: + experiment (google.cloud.dialogflowcx_v3.types.Experiment): + Required. The experiment to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to control which fields + get updated. 
+ """ + + experiment: "Experiment" = proto.Field( + proto.MESSAGE, + number=1, + message="Experiment", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteExperimentRequest(proto.Message): + r"""The request message for + [Experiments.DeleteExperiment][google.cloud.dialogflow.cx.v3.Experiments.DeleteExperiment]. + + Attributes: + name (str): + Required. The name of the + [Environment][google.cloud.dialogflow.cx.v3.Environment] to + delete. Format: + ``projects//locations//agents//environments//experiments/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StartExperimentRequest(proto.Message): + r"""The request message for + [Experiments.StartExperiment][google.cloud.dialogflow.cx.v3.Experiments.StartExperiment]. + + Attributes: + name (str): + Required. Resource name of the experiment to start. Format: + ``projects//locations//agents//environments//experiments/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StopExperimentRequest(proto.Message): + r"""The request message for + [Experiments.StopExperiment][google.cloud.dialogflow.cx.v3.Experiments.StopExperiment]. + + Attributes: + name (str): + Required. Resource name of the experiment to stop. Format: + ``projects//locations//agents//environments//experiments/``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py new file mode 100644 index 000000000000..88755b20e9ba --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py @@ -0,0 +1,759 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import import_strategy, page, validation_message + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "NluSettings", + "Flow", + "CreateFlowRequest", + "DeleteFlowRequest", + "ListFlowsRequest", + "ListFlowsResponse", + "GetFlowRequest", + "UpdateFlowRequest", + "TrainFlowRequest", + "ValidateFlowRequest", + "GetFlowValidationResultRequest", + "FlowValidationResult", + "ImportFlowRequest", + "FlowImportStrategy", + "ImportFlowResponse", + "ExportFlowRequest", + "ExportFlowResponse", + }, +) + + +class NluSettings(proto.Message): + r"""Settings related to NLU. 
+ + Attributes: + model_type (google.cloud.dialogflowcx_v3.types.NluSettings.ModelType): + Indicates the type of NLU model. + classification_threshold (float): + To filter out false positive results and + still get variety in matched natural language + inputs for your agent, you can tune the machine + learning classification threshold. If the + returned score value is less than the threshold + value, then a no-match event will be triggered. + The score values range from 0.0 (completely + uncertain) to 1.0 (completely certain). If set + to 0.0, the default of 0.3 is used. + model_training_mode (google.cloud.dialogflowcx_v3.types.NluSettings.ModelTrainingMode): + Indicates NLU model training mode. + """ + + class ModelType(proto.Enum): + r"""NLU model type. + + Values: + MODEL_TYPE_UNSPECIFIED (0): + Not specified. ``MODEL_TYPE_STANDARD`` will be used. + MODEL_TYPE_STANDARD (1): + Use standard NLU model. + MODEL_TYPE_ADVANCED (3): + Use advanced NLU model. + """ + MODEL_TYPE_UNSPECIFIED = 0 + MODEL_TYPE_STANDARD = 1 + MODEL_TYPE_ADVANCED = 3 + + class ModelTrainingMode(proto.Enum): + r"""NLU model training mode. + + Values: + MODEL_TRAINING_MODE_UNSPECIFIED (0): + Not specified. ``MODEL_TRAINING_MODE_AUTOMATIC`` will be + used. + MODEL_TRAINING_MODE_AUTOMATIC (1): + NLU model training is automatically triggered + when a flow gets modified. User can also + manually trigger model training in this mode. + MODEL_TRAINING_MODE_MANUAL (2): + User needs to manually trigger NLU model + training. Best for large flows whose models take + long time to train. 
+ """ + MODEL_TRAINING_MODE_UNSPECIFIED = 0 + MODEL_TRAINING_MODE_AUTOMATIC = 1 + MODEL_TRAINING_MODE_MANUAL = 2 + + model_type: ModelType = proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) + classification_threshold: float = proto.Field( + proto.FLOAT, + number=3, + ) + model_training_mode: ModelTrainingMode = proto.Field( + proto.ENUM, + number=4, + enum=ModelTrainingMode, + ) + + +class Flow(proto.Message): + r"""Flows represents the conversation flows when you build your + chatbot agent. + A flow consists of many pages connected by the transition + routes. Conversations always start with the built-in Start Flow + (with an all-0 ID). Transition routes can direct the + conversation session from the current flow (parent flow) to + another flow (sub flow). When the sub flow is finished, + Dialogflow will bring the session back to the parent flow, where + the sub flow is started. + + Usually, when a transition route is followed by a matched + intent, the intent will be "consumed". This means the intent + won't activate more transition routes. However, when the + followed transition route moves the conversation session into a + different flow, the matched intent can be carried over and to be + consumed in the target flow. + + Attributes: + name (str): + The unique identifier of the flow. Format: + ``projects//locations//agents//flows/``. + display_name (str): + Required. The human-readable name of the + flow. + description (str): + The description of the flow. The maximum + length is 500 characters. If exceeded, the + request is rejected. + transition_routes (MutableSequence[google.cloud.dialogflowcx_v3.types.TransitionRoute]): + A flow's transition routes serve two purposes: + + - They are responsible for matching the user's first + utterances in the flow. 
+ - They are inherited by every page's [transition + routes][Page.transition_routes] and can support use cases + such as the user saying "help" or "can I talk to a + human?", which can be handled in a common way regardless + of the current page. Transition routes defined in the + page have higher priority than those defined in the flow. + + TransitionRoutes are evaluated in the following order: + + - TransitionRoutes with intent specified. + - TransitionRoutes with only condition specified. + + TransitionRoutes with intent specified are inherited by + pages in the flow. + event_handlers (MutableSequence[google.cloud.dialogflowcx_v3.types.EventHandler]): + A flow's event handlers serve two purposes: + + - They are responsible for handling events (e.g. no match, + webhook errors) in the flow. + - They are inherited by every page's [event + handlers][Page.event_handlers], which can be used to + handle common events regardless of the current page. + Event handlers defined in the page have higher priority + than those defined in the flow. + + Unlike + [transition_routes][google.cloud.dialogflow.cx.v3.Flow.transition_routes], + these handlers are evaluated on a first-match basis. The + first one that matches the event gets executed, with the rest + being ignored. + transition_route_groups (MutableSequence[str]): + A flow's transition route groups serve two purposes: + + - They are responsible for matching the user's first + utterances in the flow. + - They are inherited by every page's [transition route + groups][Page.transition_route_groups]. Transition route + groups defined in the page have higher priority than + those defined in the flow. + + Format:\ ``projects//locations//agents//flows//transitionRouteGroups/`` + or + ``projects//locations//agents//transitionRouteGroups/`` + for agent-level groups. + nlu_settings (google.cloud.dialogflowcx_v3.types.NluSettings): + NLU related settings of the flow. 
+ knowledge_connector_settings (google.cloud.dialogflowcx_v3.types.KnowledgeConnectorSettings): + Optional. Knowledge connector configuration. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + transition_routes: MutableSequence[page.TransitionRoute] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=page.TransitionRoute, + ) + event_handlers: MutableSequence[page.EventHandler] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=page.EventHandler, + ) + transition_route_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + nlu_settings: "NluSettings" = proto.Field( + proto.MESSAGE, + number=11, + message="NluSettings", + ) + knowledge_connector_settings: page.KnowledgeConnectorSettings = proto.Field( + proto.MESSAGE, + number=18, + message=page.KnowledgeConnectorSettings, + ) + + +class CreateFlowRequest(proto.Message): + r"""The request message for + [Flows.CreateFlow][google.cloud.dialogflow.cx.v3.Flows.CreateFlow]. + + Attributes: + parent (str): + Required. The agent to create a flow for. Format: + ``projects//locations//agents/``. + flow (google.cloud.dialogflowcx_v3.types.Flow): + Required. The flow to create. + language_code (str): + The language of the following fields in ``flow``: + + - ``Flow.event_handlers.trigger_fulfillment.messages`` + - ``Flow.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Flow.transition_routes.trigger_fulfillment.messages`` + - ``Flow.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + flow: "Flow" = proto.Field( + proto.MESSAGE, + number=2, + message="Flow", + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteFlowRequest(proto.Message): + r"""The request message for + [Flows.DeleteFlow][google.cloud.dialogflow.cx.v3.Flows.DeleteFlow]. + + Attributes: + name (str): + Required. The name of the flow to delete. Format: + ``projects//locations//agents//flows/``. + force (bool): + This field has no effect for flows with no incoming + transitions. For flows with incoming transitions: + + - If ``force`` is set to false, an error will be returned + with message indicating the incoming transitions. + - If ``force`` is set to true, Dialogflow will remove the + flow, as well as any transitions to the flow (i.e. + [Target flow][EventHandler.target_flow] in event handlers + or [Target flow][TransitionRoute.target_flow] in + transition routes that point to this flow will be + cleared). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListFlowsRequest(proto.Message): + r"""The request message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + + Attributes: + parent (str): + Required. The agent containing the flows. Format: + ``projects//locations//agents/``. + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + language_code (str): + The language to list flows for. 
The following fields are + language dependent: + + - ``Flow.event_handlers.trigger_fulfillment.messages`` + - ``Flow.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Flow.transition_routes.trigger_fulfillment.messages`` + - ``Flow.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + language_code: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListFlowsResponse(proto.Message): + r"""The response message for + [Flows.ListFlows][google.cloud.dialogflow.cx.v3.Flows.ListFlows]. + + Attributes: + flows (MutableSequence[google.cloud.dialogflowcx_v3.types.Flow]): + The list of flows. There will be a maximum number of items + returned based on the page_size field in the request. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + flows: MutableSequence["Flow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Flow", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetFlowRequest(proto.Message): + r"""The response message for + [Flows.GetFlow][google.cloud.dialogflow.cx.v3.Flows.GetFlow]. + + Attributes: + name (str): + Required. The name of the flow to get. Format: + ``projects//locations//agents//flows/``. + language_code (str): + The language to retrieve the flow for. 
The following fields + are language dependent: + + - ``Flow.event_handlers.trigger_fulfillment.messages`` + - ``Flow.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Flow.transition_routes.trigger_fulfillment.messages`` + - ``Flow.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateFlowRequest(proto.Message): + r"""The request message for + [Flows.UpdateFlow][google.cloud.dialogflow.cx.v3.Flows.UpdateFlow]. + + Attributes: + flow (google.cloud.dialogflowcx_v3.types.Flow): + Required. The flow to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get updated. + If the mask is not present, all fields will be + updated. + language_code (str): + The language of the following fields in ``flow``: + + - ``Flow.event_handlers.trigger_fulfillment.messages`` + - ``Flow.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Flow.transition_routes.trigger_fulfillment.messages`` + - ``Flow.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + flow: "Flow" = proto.Field( + proto.MESSAGE, + number=1, + message="Flow", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TrainFlowRequest(proto.Message): + r"""The request message for + [Flows.TrainFlow][google.cloud.dialogflow.cx.v3.Flows.TrainFlow]. + + Attributes: + name (str): + Required. 
The flow to train. Format: + ``projects//locations//agents//flows/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ValidateFlowRequest(proto.Message): + r"""The request message for + [Flows.ValidateFlow][google.cloud.dialogflow.cx.v3.Flows.ValidateFlow]. + + Attributes: + name (str): + Required. The flow to validate. Format: + ``projects//locations//agents//flows/``. + language_code (str): + If not specified, the agent's default + language is used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetFlowValidationResultRequest(proto.Message): + r"""The request message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + + Attributes: + name (str): + Required. The flow name. Format: + ``projects//locations//agents//flows//validationResult``. + language_code (str): + If not specified, the agent's default + language is used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FlowValidationResult(proto.Message): + r"""The response message for + [Flows.GetFlowValidationResult][google.cloud.dialogflow.cx.v3.Flows.GetFlowValidationResult]. + + Attributes: + name (str): + The unique identifier of the flow validation result. Format: + ``projects//locations//agents//flows//validationResult``. + validation_messages (MutableSequence[google.cloud.dialogflowcx_v3.types.ValidationMessage]): + Contains all validation messages. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last time the flow was validated. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validation_messages: MutableSequence[ + validation_message.ValidationMessage + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=validation_message.ValidationMessage, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ImportFlowRequest(proto.Message): + r"""The request message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The agent to import the flow into. Format: + ``projects//locations//agents/``. + flow_uri (str): + The `Google Cloud + Storage `__ URI to + import flow from. The format of this URI must be + ``gs:///``. + + Dialogflow performs a read operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have read permissions for the object. + For more information, see `Dialogflow access + control `__. + + This field is a member of `oneof`_ ``flow``. + flow_content (bytes): + Uncompressed raw byte content for flow. + + This field is a member of `oneof`_ ``flow``. + import_option (google.cloud.dialogflowcx_v3.types.ImportFlowRequest.ImportOption): + Flow import mode. If not specified, ``KEEP`` is assumed. + flow_import_strategy (google.cloud.dialogflowcx_v3.types.FlowImportStrategy): + Optional. Specifies the import strategy used + when resolving resource conflicts. + """ + + class ImportOption(proto.Enum): + r"""Import option. + + Values: + IMPORT_OPTION_UNSPECIFIED (0): + Unspecified. Treated as ``KEEP``. 
+ KEEP (1): + Always respect settings in exported flow + content. It may cause a import failure if some + settings (e.g. custom NLU) are not supported in + the agent to import into. + FALLBACK (2): + Fallback to default settings if some settings + are not supported in the agent to import into. + E.g. Standard NLU will be used if custom NLU is + not available. + """ + IMPORT_OPTION_UNSPECIFIED = 0 + KEEP = 1 + FALLBACK = 2 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + flow_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="flow", + ) + flow_content: bytes = proto.Field( + proto.BYTES, + number=3, + oneof="flow", + ) + import_option: ImportOption = proto.Field( + proto.ENUM, + number=4, + enum=ImportOption, + ) + flow_import_strategy: "FlowImportStrategy" = proto.Field( + proto.MESSAGE, + number=5, + message="FlowImportStrategy", + ) + + +class FlowImportStrategy(proto.Message): + r"""The flow import strategy used for resource conflict resolution + associated with an + [ImportFlowRequest][google.cloud.dialogflow.cx.v3.ImportFlowRequest]. + + Attributes: + global_import_strategy (google.cloud.dialogflowcx_v3.types.ImportStrategy): + Optional. Import strategy for resource conflict resolution, + applied globally throughout the flow. It will be applied for + all display name conflicts in the imported content. If not + specified, 'CREATE_NEW' is assumed. + """ + + global_import_strategy: import_strategy.ImportStrategy = proto.Field( + proto.ENUM, + number=1, + enum=import_strategy.ImportStrategy, + ) + + +class ImportFlowResponse(proto.Message): + r"""The response message for + [Flows.ImportFlow][google.cloud.dialogflow.cx.v3.Flows.ImportFlow]. + + Attributes: + flow (str): + The unique identifier of the new flow. Format: + ``projects//locations//agents//flows/``. 
+ """ + + flow: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportFlowRequest(proto.Message): + r"""The request message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. + + Attributes: + name (str): + Required. The name of the flow to export. Format: + ``projects//locations//agents//flows/``. + flow_uri (str): + Optional. The `Google Cloud + Storage `__ URI to + export the flow to. The format of this URI must be + ``gs:///``. If left unspecified, + the serialized flow is returned inline. + + Dialogflow performs a write operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have write permissions for the object. + For more information, see `Dialogflow access + control `__. + include_referenced_flows (bool): + Optional. Whether to export flows referenced + by the specified flow. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + flow_uri: str = proto.Field( + proto.STRING, + number=2, + ) + include_referenced_flows: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ExportFlowResponse(proto.Message): + r"""The response message for + [Flows.ExportFlow][google.cloud.dialogflow.cx.v3.Flows.ExportFlow]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + flow_uri (str): + The URI to a file containing the exported flow. This field + is populated only if ``flow_uri`` is specified in + [ExportFlowRequest][google.cloud.dialogflow.cx.v3.ExportFlowRequest]. + + This field is a member of `oneof`_ ``flow``. + flow_content (bytes): + Uncompressed raw byte content for flow. + + This field is a member of `oneof`_ ``flow``. 
+ """ + + flow_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="flow", + ) + flow_content: bytes = proto.Field( + proto.BYTES, + number=2, + oneof="flow", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py new file mode 100644 index 000000000000..e477b5abc3f5 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import response_message + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "Fulfillment", + }, +) + + +class Fulfillment(proto.Message): + r"""A fulfillment can do one or more of the following actions at the + same time: + + - Generate rich message responses. + - Set parameter values. + - Call the webhook. + + Fulfillments can be called at various stages in the + [Page][google.cloud.dialogflow.cx.v3.Page] or + [Form][google.cloud.dialogflow.cx.v3.Form] lifecycle. 
For example, + when a + [DetectIntentRequest][google.cloud.dialogflow.cx.v3.DetectIntentRequest] + drives a session to enter a new page, the page's entry fulfillment + can add a static response to the + [QueryResult][google.cloud.dialogflow.cx.v3.QueryResult] in the + returning + [DetectIntentResponse][google.cloud.dialogflow.cx.v3.DetectIntentResponse], + call the webhook (for example, to load user data from a database), + or both. + + Attributes: + messages (MutableSequence[google.cloud.dialogflowcx_v3.types.ResponseMessage]): + The list of rich message responses to present + to the user. + webhook (str): + The webhook to call. Format: + ``projects//locations//agents//webhooks/``. + return_partial_responses (bool): + Whether Dialogflow should return currently + queued fulfillment response messages in + streaming APIs. If a webhook is specified, it + happens before Dialogflow invokes webhook. + Warning: + + 1) This flag only affects streaming API. + Responses are still queued and returned once in + non-streaming API. + 2) The flag can be enabled in any fulfillment + but only the first 3 partial responses will be + returned. You may only want to apply it to + fulfillments that have slow webhooks. + tag (str): + The value of this field will be populated in the + [WebhookRequest][google.cloud.dialogflow.cx.v3.WebhookRequest] + ``fulfillmentInfo.tag`` field by Dialogflow when the + associated webhook is called. The tag is typically used by + the webhook service to identify which fulfillment is being + called, but it could be used for other purposes. This field + is required if ``webhook`` is specified. + set_parameter_actions (MutableSequence[google.cloud.dialogflowcx_v3.types.Fulfillment.SetParameterAction]): + Set parameter values before executing the + webhook. + conditional_cases (MutableSequence[google.cloud.dialogflowcx_v3.types.Fulfillment.ConditionalCases]): + Conditional cases for this fulfillment. 
+ """ + + class SetParameterAction(proto.Message): + r"""Setting a parameter value. + + Attributes: + parameter (str): + Display name of the parameter. + value (google.protobuf.struct_pb2.Value): + The new value of the parameter. A null value + clears the parameter. + """ + + parameter: str = proto.Field( + proto.STRING, + number=1, + ) + value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + class ConditionalCases(proto.Message): + r"""A list of cascading if-else conditions. Cases are mutually + exclusive. The first one with a matching condition is selected, + all the rest ignored. + + Attributes: + cases (MutableSequence[google.cloud.dialogflowcx_v3.types.Fulfillment.ConditionalCases.Case]): + A list of cascading if-else conditions. + """ + + class Case(proto.Message): + r"""Each case has a Boolean condition. When it is evaluated to be + True, the corresponding messages will be selected and evaluated + recursively. + + Attributes: + condition (str): + The condition to activate and select this case. Empty means + the condition is always true. The condition is evaluated + against [form parameters][Form.parameters] or [session + parameters][SessionInfo.parameters]. + + See the `conditions + reference `__. + case_content (MutableSequence[google.cloud.dialogflowcx_v3.types.Fulfillment.ConditionalCases.Case.CaseContent]): + A list of case content. + """ + + class CaseContent(proto.Message): + r"""The list of messages or conditional cases to activate for + this case. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + message (google.cloud.dialogflowcx_v3.types.ResponseMessage): + Returned message. 
+ + This field is a member of `oneof`_ ``cases_or_message``. + additional_cases (google.cloud.dialogflowcx_v3.types.Fulfillment.ConditionalCases): + Additional cases to be evaluated. + + This field is a member of `oneof`_ ``cases_or_message``. + """ + + message: response_message.ResponseMessage = proto.Field( + proto.MESSAGE, + number=1, + oneof="cases_or_message", + message=response_message.ResponseMessage, + ) + additional_cases: "Fulfillment.ConditionalCases" = proto.Field( + proto.MESSAGE, + number=2, + oneof="cases_or_message", + message="Fulfillment.ConditionalCases", + ) + + condition: str = proto.Field( + proto.STRING, + number=1, + ) + case_content: MutableSequence[ + "Fulfillment.ConditionalCases.Case.CaseContent" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Fulfillment.ConditionalCases.Case.CaseContent", + ) + + cases: MutableSequence[ + "Fulfillment.ConditionalCases.Case" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Fulfillment.ConditionalCases.Case", + ) + + messages: MutableSequence[response_message.ResponseMessage] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=response_message.ResponseMessage, + ) + webhook: str = proto.Field( + proto.STRING, + number=2, + ) + return_partial_responses: bool = proto.Field( + proto.BOOL, + number=8, + ) + tag: str = proto.Field( + proto.STRING, + number=3, + ) + set_parameter_actions: MutableSequence[SetParameterAction] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=SetParameterAction, + ) + conditional_cases: MutableSequence[ConditionalCases] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ConditionalCases, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/gcs.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/gcs.py new file mode 100644 index 000000000000..6cb00607b448 --- /dev/null +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/gcs.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "GcsDestination", + }, +) + + +class GcsDestination(proto.Message): + r"""Google Cloud Storage location for a Dialogflow operation that + writes or exports objects (e.g. exported agent or transcripts) + outside of Dialogflow. + + Attributes: + uri (str): + Required. The Google Cloud Storage URI for the exported + objects. A URI is of the form: + ``gs://bucket/object-name-or-prefix`` Whether a full object + name, or just a prefix, its usage depends on the Dialogflow + operation. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py new file mode 100644 index 000000000000..da0f15209b73 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/generative_settings.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import safety_settings + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "GenerativeSettings", + }, +) + + +class GenerativeSettings(proto.Message): + r"""Settings for Generative AI. + + Attributes: + name (str): + Format: + ``projects//locations//agents//generativeSettings``. + fallback_settings (google.cloud.dialogflowcx_v3.types.GenerativeSettings.FallbackSettings): + Settings for Generative Fallback. + generative_safety_settings (google.cloud.dialogflowcx_v3.types.SafetySettings): + Settings for Generative Safety. + knowledge_connector_settings (google.cloud.dialogflowcx_v3.types.GenerativeSettings.KnowledgeConnectorSettings): + Settings for knowledge connector. 
+ language_code (str): + Language for this settings. + """ + + class FallbackSettings(proto.Message): + r"""Settings for Generative Fallback. + + Attributes: + selected_prompt (str): + Display name of the selected prompt. + prompt_templates (MutableSequence[google.cloud.dialogflowcx_v3.types.GenerativeSettings.FallbackSettings.PromptTemplate]): + Stored prompts that can be selected, for + example default templates like "conservative" or + "chatty", or user defined ones. + """ + + class PromptTemplate(proto.Message): + r"""Prompt template. + + Attributes: + display_name (str): + Prompt name. + prompt_text (str): + Prompt text that is sent to a LLM on no-match + default, placeholders are filled downstream. For + example: "Here is a conversation $conversation, + a response is: ". + frozen (bool): + If the flag is true, the prompt is frozen and + cannot be modified by users. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + prompt_text: str = proto.Field( + proto.STRING, + number=2, + ) + frozen: bool = proto.Field( + proto.BOOL, + number=3, + ) + + selected_prompt: str = proto.Field( + proto.STRING, + number=3, + ) + prompt_templates: MutableSequence[ + "GenerativeSettings.FallbackSettings.PromptTemplate" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="GenerativeSettings.FallbackSettings.PromptTemplate", + ) + + class KnowledgeConnectorSettings(proto.Message): + r"""Settings for knowledge connector. These parameters are used for LLM + prompt like "You are . You are a helpful and verbose + at , . Your task is to help + humans on ". + + Attributes: + business (str): + Name of the company, organization or other + entity that the agent represents. Used for + knowledge connector LLM prompt and for knowledge + search. + agent (str): + Name of the virtual agent. Used for LLM + prompt. Can be left empty. + agent_identity (str): + Identity of the agent, e.g. "virtual agent", + "AI assistant". 
+ business_description (str): + Company description, used for LLM prompt, + e.g. "a family company selling freshly roasted + coffee beans". + agent_scope (str): + Agent scope, e.g. "Example company website", + "internal Example company website for + employees", "manual of car owner". + """ + + business: str = proto.Field( + proto.STRING, + number=1, + ) + agent: str = proto.Field( + proto.STRING, + number=2, + ) + agent_identity: str = proto.Field( + proto.STRING, + number=3, + ) + business_description: str = proto.Field( + proto.STRING, + number=4, + ) + agent_scope: str = proto.Field( + proto.STRING, + number=5, + ) + + name: str = proto.Field( + proto.STRING, + number=5, + ) + fallback_settings: FallbackSettings = proto.Field( + proto.MESSAGE, + number=1, + message=FallbackSettings, + ) + generative_safety_settings: safety_settings.SafetySettings = proto.Field( + proto.MESSAGE, + number=3, + message=safety_settings.SafetySettings, + ) + knowledge_connector_settings: KnowledgeConnectorSettings = proto.Field( + proto.MESSAGE, + number=7, + message=KnowledgeConnectorSettings, + ) + language_code: str = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/import_strategy.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/import_strategy.py new file mode 100644 index 000000000000..c9762942e4c8 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/import_strategy.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "ImportStrategy", + }, +) + + +class ImportStrategy(proto.Enum): + r"""Import strategies for the conflict resolution of resources + (i.e. intents, entities, and webhooks) with identical display + names during import operations. + + Values: + IMPORT_STRATEGY_UNSPECIFIED (0): + Unspecified. Treated as 'CREATE_NEW'. + IMPORT_STRATEGY_CREATE_NEW (1): + Create a new resource with a numeric suffix + appended to the end of the existing display + name. + IMPORT_STRATEGY_REPLACE (2): + Replace existing resource with incoming + resource in the content to be imported. + IMPORT_STRATEGY_KEEP (3): + Keep existing resource and discard incoming + resource in the content to be imported. + IMPORT_STRATEGY_MERGE (4): + Combine existing and incoming resources when + a conflict is encountered. + IMPORT_STRATEGY_THROW_ERROR (5): + Throw error if a conflict is encountered. 
+ """ + IMPORT_STRATEGY_UNSPECIFIED = 0 + IMPORT_STRATEGY_CREATE_NEW = 1 + IMPORT_STRATEGY_REPLACE = 2 + IMPORT_STRATEGY_KEEP = 3 + IMPORT_STRATEGY_MERGE = 4 + IMPORT_STRATEGY_THROW_ERROR = 5 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py new file mode 100644 index 000000000000..3cae590ad252 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py @@ -0,0 +1,487 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "IntentView", + "Intent", + "ListIntentsRequest", + "ListIntentsResponse", + "GetIntentRequest", + "CreateIntentRequest", + "UpdateIntentRequest", + "DeleteIntentRequest", + }, +) + + +class IntentView(proto.Enum): + r"""Represents the options for views of an intent. + An intent can be a sizable object. Therefore, we provide a + resource view that does not return training phrases in the + response. + + Values: + INTENT_VIEW_UNSPECIFIED (0): + Not specified. Treated as INTENT_VIEW_FULL. 
+ INTENT_VIEW_PARTIAL (1): + Training phrases field is not populated in + the response. + INTENT_VIEW_FULL (2): + All fields are populated. + """ + INTENT_VIEW_UNSPECIFIED = 0 + INTENT_VIEW_PARTIAL = 1 + INTENT_VIEW_FULL = 2 + + +class Intent(proto.Message): + r"""An intent represents a user's intent to interact with a + conversational agent. + You can provide information for the Dialogflow API to use to + match user input to an intent by adding training phrases (i.e., + examples of user input) to your intent. + + Attributes: + name (str): + The unique identifier of the intent. Required for the + [Intents.UpdateIntent][google.cloud.dialogflow.cx.v3.Intents.UpdateIntent] + method. + [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent] + populates the name automatically. Format: + ``projects//locations//agents//intents/``. + display_name (str): + Required. The human-readable name of the + intent, unique within the agent. + training_phrases (MutableSequence[google.cloud.dialogflowcx_v3.types.Intent.TrainingPhrase]): + The collection of training phrases the agent + is trained on to identify the intent. + parameters (MutableSequence[google.cloud.dialogflowcx_v3.types.Intent.Parameter]): + The collection of parameters associated with + the intent. + priority (int): + The priority of this intent. Higher numbers represent higher + priorities. + + - If the supplied value is unspecified or 0, the service + translates the value to 500,000, which corresponds to the + ``Normal`` priority in the console. + - If the supplied value is negative, the intent is ignored + in runtime detect intent requests. + is_fallback (bool): + Indicates whether this is a fallback intent. + Currently only default fallback intent is + allowed in the agent, which is added upon agent + creation. 
+ Adding training phrases to fallback intent is + useful in the case of requests that are + mistakenly matched, since training phrases + assigned to fallback intents act as negative + examples that triggers no-match event. + labels (MutableMapping[str, str]): + The key/value metadata to label an intent. Labels can + contain lowercase letters, digits and the symbols '-' and + '_'. International characters are allowed, including letters + from unicase alphabets. Keys must start with a letter. Keys + and values can be no longer than 63 characters and no more + than 128 bytes. + + Prefix "sys-" is reserved for Dialogflow defined labels. + Currently allowed Dialogflow defined labels include: + + - sys-head + - sys-contextual The above labels do not require value. + "sys-head" means the intent is a head intent. + "sys.contextual" means the intent is a contextual intent. + description (str): + Human readable description for better + understanding an intent like its scope, content, + result etc. Maximum character limit: 140 + characters. + """ + + class TrainingPhrase(proto.Message): + r"""Represents an example that the agent is trained on to + identify the intent. + + Attributes: + id (str): + Output only. The unique identifier of the + training phrase. + parts (MutableSequence[google.cloud.dialogflowcx_v3.types.Intent.TrainingPhrase.Part]): + Required. The ordered list of training phrase parts. The + parts are concatenated in order to form the training phrase. + + Note: The API does not automatically annotate training + phrases like the Dialogflow Console does. + + Note: Do not forget to include whitespace at part + boundaries, so the training phrase is well formatted when + the parts are concatenated. + + If the training phrase does not need to be annotated with + parameters, you just need a single part with only the + [Part.text][google.cloud.dialogflow.cx.v3.Intent.TrainingPhrase.Part.text] + field set. 
+ + If you want to annotate the training phrase, you must create + multiple parts, where the fields of each part are populated + in one of two ways: + + - ``Part.text`` is set to a part of the phrase that has no + parameters. + - ``Part.text`` is set to a part of the phrase that you + want to annotate, and the ``parameter_id`` field is set. + repeat_count (int): + Indicates how many times this example was + added to the intent. + """ + + class Part(proto.Message): + r"""Represents a part of a training phrase. + + Attributes: + text (str): + Required. The text for this part. + parameter_id (str): + The + [parameter][google.cloud.dialogflow.cx.v3.Intent.Parameter] + used to annotate this part of the training phrase. This + field is required for annotated parts of the training + phrase. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + parameter_id: str = proto.Field( + proto.STRING, + number=2, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + parts: MutableSequence["Intent.TrainingPhrase.Part"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Intent.TrainingPhrase.Part", + ) + repeat_count: int = proto.Field( + proto.INT32, + number=3, + ) + + class Parameter(proto.Message): + r"""Represents an intent parameter. + + Attributes: + id (str): + Required. The unique identifier of the parameter. This field + is used by [training + phrases][google.cloud.dialogflow.cx.v3.Intent.TrainingPhrase] + to annotate their + [parts][google.cloud.dialogflow.cx.v3.Intent.TrainingPhrase.Part]. + entity_type (str): + Required. The entity type of the parameter. Format: + ``projects/-/locations/-/agents/-/entityTypes/`` + for system entity types (for example, + ``projects/-/locations/-/agents/-/entityTypes/sys.date``), + or + ``projects//locations//agents//entityTypes/`` + for developer entity types. + is_list (bool): + Indicates whether the parameter represents a + list of values. 
+ redact (bool): + Indicates whether the parameter content should be redacted + in log. If redaction is enabled, the parameter content will + be replaced by parameter name during logging. Note: the + parameter content is subject to redaction if either + parameter level redaction or [entity type level + redaction][google.cloud.dialogflow.cx.v3.EntityType.redact] + is enabled. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + entity_type: str = proto.Field( + proto.STRING, + number=2, + ) + is_list: bool = proto.Field( + proto.BOOL, + number=3, + ) + redact: bool = proto.Field( + proto.BOOL, + number=4, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + training_phrases: MutableSequence[TrainingPhrase] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TrainingPhrase, + ) + parameters: MutableSequence[Parameter] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=Parameter, + ) + priority: int = proto.Field( + proto.INT32, + number=5, + ) + is_fallback: bool = proto.Field( + proto.BOOL, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + description: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ListIntentsRequest(proto.Message): + r"""The request message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + + Attributes: + parent (str): + Required. The agent to list all intents for. Format: + ``projects//locations//agents/``. + language_code (str): + The language to list intents for. The following fields are + language dependent: + + - ``Intent.training_phrases.parts.text`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. 
+ intent_view (google.cloud.dialogflowcx_v3.types.IntentView): + The resource view to apply to the returned + intent. + page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + intent_view: "IntentView" = proto.Field( + proto.ENUM, + number=5, + enum="IntentView", + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListIntentsResponse(proto.Message): + r"""The response message for + [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents]. + + Attributes: + intents (MutableSequence[google.cloud.dialogflowcx_v3.types.Intent]): + The list of intents. There will be a maximum number of items + returned based on the page_size field in the request. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + intents: MutableSequence["Intent"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Intent", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetIntentRequest(proto.Message): + r"""The request message for + [Intents.GetIntent][google.cloud.dialogflow.cx.v3.Intents.GetIntent]. + + Attributes: + name (str): + Required. The name of the intent. Format: + ``projects//locations//agents//intents/``. + language_code (str): + The language to retrieve the intent for. The following + fields are language dependent: + + - ``Intent.training_phrases.parts.text`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. 
Note: languages must be enabled in the agent + before they can be used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateIntentRequest(proto.Message): + r"""The request message for + [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. + + Attributes: + parent (str): + Required. The agent to create an intent for. Format: + ``projects//locations//agents/``. + intent (google.cloud.dialogflowcx_v3.types.Intent): + Required. The intent to create. + language_code (str): + The language of the following fields in ``intent``: + + - ``Intent.training_phrases.parts.text`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intent: "Intent" = proto.Field( + proto.MESSAGE, + number=2, + message="Intent", + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateIntentRequest(proto.Message): + r"""The request message for + [Intents.UpdateIntent][google.cloud.dialogflow.cx.v3.Intents.UpdateIntent]. + + Attributes: + intent (google.cloud.dialogflowcx_v3.types.Intent): + Required. The intent to update. + language_code (str): + The language of the following fields in ``intent``: + + - ``Intent.training_phrases.parts.text`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get updated. + If the mask is not present, all fields will be + updated. 
+ """ + + intent: "Intent" = proto.Field( + proto.MESSAGE, + number=1, + message="Intent", + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteIntentRequest(proto.Message): + r"""The request message for + [Intents.DeleteIntent][google.cloud.dialogflow.cx.v3.Intents.DeleteIntent]. + + Attributes: + name (str): + Required. The name of the intent to delete. Format: + ``projects//locations//agents//intents/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py new file mode 100644 index 000000000000..323551e02028 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py @@ -0,0 +1,847 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dialogflowcx_v3.types import data_store_connection, fulfillment + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "Page", + "Form", + "EventHandler", + "TransitionRoute", + "ListPagesRequest", + "ListPagesResponse", + "GetPageRequest", + "CreatePageRequest", + "UpdatePageRequest", + "DeletePageRequest", + "KnowledgeConnectorSettings", + }, +) + + +class Page(proto.Message): + r"""A Dialogflow CX conversation (session) can be described and + visualized as a state machine. The states of a CX session are + represented by pages. + + For each flow, you define many pages, where your combined pages can + handle a complete conversation on the topics the flow is designed + for. At any given moment, exactly one page is the current page, the + current page is considered active, and the flow associated with that + page is considered active. Every flow has a special start page. When + a flow initially becomes active, the start page page becomes the + current page. For each conversational turn, the current page will + either stay the same or transition to another page. + + You configure each page to collect information from the end-user + that is relevant for the conversational state represented by the + page. + + For more information, see the `Page + guide `__. + + Attributes: + name (str): + The unique identifier of the page. Required for the + [Pages.UpdatePage][google.cloud.dialogflow.cx.v3.Pages.UpdatePage] + method. + [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage] + populates the name automatically. Format: + ``projects//locations//agents//flows//pages/``. + display_name (str): + Required. The human-readable name of the + page, unique within the flow. 
+ entry_fulfillment (google.cloud.dialogflowcx_v3.types.Fulfillment): + The fulfillment to call when the session is + entering the page. + form (google.cloud.dialogflowcx_v3.types.Form): + The form associated with the page, used for + collecting parameters relevant to the page. + transition_route_groups (MutableSequence[str]): + Ordered list of + [``TransitionRouteGroups``][google.cloud.dialogflow.cx.v3.TransitionRouteGroup] + added to the page. Transition route groups must be unique + within a page. If the page links both flow-level transition + route groups and agent-level transition route groups, the + flow-level ones will have higher priority and will be put + before the agent-level ones. + + - If multiple transition routes within a page scope refer + to the same intent, then the precedence order is: page's + transition route -> page's transition route group -> + flow's transition routes. + + - If multiple transition route groups within a page contain + the same intent, then the first group in the ordered list + takes precedence. + + Format:\ ``projects//locations//agents//flows//transitionRouteGroups/`` + or + ``projects//locations//agents//transitionRouteGroups/`` + for agent-level groups. + transition_routes (MutableSequence[google.cloud.dialogflowcx_v3.types.TransitionRoute]): + A list of transitions for the transition rules of this page. + They route the conversation to another page in the same + flow, or another flow. + + When we are in a certain page, the TransitionRoutes are + evalauted in the following order: + + - TransitionRoutes defined in the page with intent + specified. + - TransitionRoutes defined in the [transition route + groups][google.cloud.dialogflow.cx.v3.Page.transition_route_groups] + with intent specified. + - TransitionRoutes defined in flow with intent specified. + - TransitionRoutes defined in the [transition route + groups][google.cloud.dialogflow.cx.v3.Flow.transition_route_groups] + with intent specified. 
+ - TransitionRoutes defined in the page with only condition + specified. + - TransitionRoutes defined in the [transition route + groups][google.cloud.dialogflow.cx.v3.Page.transition_route_groups] + with only condition specified. + event_handlers (MutableSequence[google.cloud.dialogflowcx_v3.types.EventHandler]): + Handlers associated with the page to handle + events such as webhook errors, no match or no + input. + knowledge_connector_settings (google.cloud.dialogflowcx_v3.types.KnowledgeConnectorSettings): + Optional. Knowledge connector configuration. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + entry_fulfillment: fulfillment.Fulfillment = proto.Field( + proto.MESSAGE, + number=7, + message=fulfillment.Fulfillment, + ) + form: "Form" = proto.Field( + proto.MESSAGE, + number=4, + message="Form", + ) + transition_route_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + transition_routes: MutableSequence["TransitionRoute"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="TransitionRoute", + ) + event_handlers: MutableSequence["EventHandler"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="EventHandler", + ) + knowledge_connector_settings: "KnowledgeConnectorSettings" = proto.Field( + proto.MESSAGE, + number=18, + message="KnowledgeConnectorSettings", + ) + + +class Form(proto.Message): + r"""A form is a data model that groups related parameters that can be + collected from the user. The process in which the agent prompts the + user and collects parameter values from the user is called form + filling. A form can be added to a + [page][google.cloud.dialogflow.cx.v3.Page]. When form filling is + done, the filled parameters will be written to the + [session][google.cloud.dialogflow.cx.v3.SessionInfo.parameters]. 
+ + Attributes: + parameters (MutableSequence[google.cloud.dialogflowcx_v3.types.Form.Parameter]): + Parameters to collect from the user. + """ + + class Parameter(proto.Message): + r"""Represents a form parameter. + + Attributes: + display_name (str): + Required. The human-readable name of the + parameter, unique within the form. + required (bool): + Indicates whether the parameter is required. + Optional parameters will not trigger prompts; + however, they are filled if the user specifies + them. Required parameters must be filled before + form filling concludes. + entity_type (str): + Required. The entity type of the parameter. Format: + ``projects/-/locations/-/agents/-/entityTypes/`` + for system entity types (for example, + ``projects/-/locations/-/agents/-/entityTypes/sys.date``), + or + ``projects//locations//agents//entityTypes/`` + for developer entity types. + is_list (bool): + Indicates whether the parameter represents a + list of values. + fill_behavior (google.cloud.dialogflowcx_v3.types.Form.Parameter.FillBehavior): + Required. Defines fill behavior for the + parameter. + default_value (google.protobuf.struct_pb2.Value): + The default value of an optional parameter. + If the parameter is required, the default value + will be ignored. + redact (bool): + Indicates whether the parameter content should be redacted + in log. If redaction is enabled, the parameter content will + be replaced by parameter name during logging. Note: the + parameter content is subject to redaction if either + parameter level redaction or [entity type level + redaction][google.cloud.dialogflow.cx.v3.EntityType.redact] + is enabled. + """ + + class FillBehavior(proto.Message): + r"""Configuration for how the filling of a parameter should be + handled. + + Attributes: + initial_prompt_fulfillment (google.cloud.dialogflowcx_v3.types.Fulfillment): + Required. The fulfillment to provide the + initial prompt that the agent can present to the + user in order to fill the parameter. 
+ reprompt_event_handlers (MutableSequence[google.cloud.dialogflowcx_v3.types.EventHandler]): + The handlers for parameter-level events, used to provide + reprompt for the parameter or transition to a different + page/flow. The supported events are: + + - ``sys.no-match-``, where N can be from 1 to 6 + - ``sys.no-match-default`` + - ``sys.no-input-``, where N can be from 1 to 6 + - ``sys.no-input-default`` + - ``sys.invalid-parameter`` + + ``initial_prompt_fulfillment`` provides the first prompt for + the parameter. + + If the user's response does not fill the parameter, a + no-match/no-input event will be triggered, and the + fulfillment associated with the + ``sys.no-match-1``/``sys.no-input-1`` handler (if defined) + will be called to provide a prompt. The + ``sys.no-match-2``/``sys.no-input-2`` handler (if defined) + will respond to the next no-match/no-input event, and so on. + + A ``sys.no-match-default`` or ``sys.no-input-default`` + handler will be used to handle all following + no-match/no-input events after all numbered + no-match/no-input handlers for the parameter are consumed. + + A ``sys.invalid-parameter`` handler can be defined to handle + the case where the parameter values have been + ``invalidated`` by webhook. For example, if the user's + response fill the parameter, however the parameter was + invalidated by webhook, the fulfillment associated with the + ``sys.invalid-parameter`` handler (if defined) will be + called to provide a prompt. + + If the event handler for the corresponding event can't be + found on the parameter, ``initial_prompt_fulfillment`` will + be re-prompted. 
+ """ + + initial_prompt_fulfillment: fulfillment.Fulfillment = proto.Field( + proto.MESSAGE, + number=3, + message=fulfillment.Fulfillment, + ) + reprompt_event_handlers: MutableSequence[ + "EventHandler" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="EventHandler", + ) + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + required: bool = proto.Field( + proto.BOOL, + number=2, + ) + entity_type: str = proto.Field( + proto.STRING, + number=3, + ) + is_list: bool = proto.Field( + proto.BOOL, + number=4, + ) + fill_behavior: "Form.Parameter.FillBehavior" = proto.Field( + proto.MESSAGE, + number=7, + message="Form.Parameter.FillBehavior", + ) + default_value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=9, + message=struct_pb2.Value, + ) + redact: bool = proto.Field( + proto.BOOL, + number=11, + ) + + parameters: MutableSequence[Parameter] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Parameter, + ) + + +class EventHandler(proto.Message): + r"""An event handler specifies an + [event][google.cloud.dialogflow.cx.v3.EventHandler.event] that can + be handled during a session. When the specified event happens, the + following actions are taken in order: + + - If there is a + [``trigger_fulfillment``][google.cloud.dialogflow.cx.v3.EventHandler.trigger_fulfillment] + associated with the event, it will be called. + - If there is a + [``target_page``][google.cloud.dialogflow.cx.v3.EventHandler.target_page] + associated with the event, the session will transition into the + specified page. + - If there is a + [``target_flow``][google.cloud.dialogflow.cx.v3.EventHandler.target_flow] + associated with the event, the session will transition into the + specified flow. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The unique identifier of this + event handler. + event (str): + Required. The name of the event to handle. + trigger_fulfillment (google.cloud.dialogflowcx_v3.types.Fulfillment): + The fulfillment to call when the event + occurs. Handling webhook errors with a + fulfillment enabled with webhook could cause + infinite loop. It is invalid to specify such + fulfillment for a handler handling webhooks. + target_page (str): + The target page to transition to. Format: + ``projects//locations//agents//flows//pages/``. + + This field is a member of `oneof`_ ``target``. + target_flow (str): + The target flow to transition to. Format: + ``projects//locations//agents//flows/``. + + This field is a member of `oneof`_ ``target``. + """ + + name: str = proto.Field( + proto.STRING, + number=6, + ) + event: str = proto.Field( + proto.STRING, + number=4, + ) + trigger_fulfillment: fulfillment.Fulfillment = proto.Field( + proto.MESSAGE, + number=5, + message=fulfillment.Fulfillment, + ) + target_page: str = proto.Field( + proto.STRING, + number=2, + oneof="target", + ) + target_flow: str = proto.Field( + proto.STRING, + number=3, + oneof="target", + ) + + +class TransitionRoute(proto.Message): + r"""A transition route specifies a + [intent][google.cloud.dialogflow.cx.v3.Intent] that can be matched + and/or a data condition that can be evaluated during a session. When + a specified transition is matched, the following actions are taken + in order: + + - If there is a + [``trigger_fulfillment``][google.cloud.dialogflow.cx.v3.TransitionRoute.trigger_fulfillment] + associated with the transition, it will be called. + - If there is a + [``target_page``][google.cloud.dialogflow.cx.v3.TransitionRoute.target_page] + associated with the transition, the session will transition into + the specified page. 
+ - If there is a + [``target_flow``][google.cloud.dialogflow.cx.v3.TransitionRoute.target_flow] + associated with the transition, the session will transition into + the specified flow. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The unique identifier of this + transition route. + description (str): + Optional. The description of the transition + route. The maximum length is 500 characters. + intent (str): + The unique identifier of an + [Intent][google.cloud.dialogflow.cx.v3.Intent]. Format: + ``projects//locations//agents//intents/``. + Indicates that the transition can only happen when the given + intent is matched. At least one of ``intent`` or + ``condition`` must be specified. When both ``intent`` and + ``condition`` are specified, the transition can only happen + when both are fulfilled. + condition (str): + The condition to evaluate against [form + parameters][google.cloud.dialogflow.cx.v3.Form.parameters] + or [session + parameters][google.cloud.dialogflow.cx.v3.SessionInfo.parameters]. + + See the `conditions + reference `__. + At least one of ``intent`` or ``condition`` must be + specified. When both ``intent`` and ``condition`` are + specified, the transition can only happen when both are + fulfilled. + trigger_fulfillment (google.cloud.dialogflowcx_v3.types.Fulfillment): + The fulfillment to call when the condition is satisfied. At + least one of ``trigger_fulfillment`` and ``target`` must be + specified. When both are defined, ``trigger_fulfillment`` is + executed first. + target_page (str): + The target page to transition to. Format: + ``projects//locations//agents//flows//pages/``. 
+ + This field is a member of `oneof`_ ``target``. + target_flow (str): + The target flow to transition to. Format: + ``projects//locations//agents//flows/``. + + This field is a member of `oneof`_ ``target``. + """ + + name: str = proto.Field( + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=8, + ) + intent: str = proto.Field( + proto.STRING, + number=1, + ) + condition: str = proto.Field( + proto.STRING, + number=2, + ) + trigger_fulfillment: fulfillment.Fulfillment = proto.Field( + proto.MESSAGE, + number=3, + message=fulfillment.Fulfillment, + ) + target_page: str = proto.Field( + proto.STRING, + number=4, + oneof="target", + ) + target_flow: str = proto.Field( + proto.STRING, + number=5, + oneof="target", + ) + + +class ListPagesRequest(proto.Message): + r"""The request message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + + Attributes: + parent (str): + Required. The flow to list all pages for. Format: + ``projects//locations//agents//flows/``. + language_code (str): + The language to list pages for. The following fields are + language dependent: + + - ``Page.entry_fulfillment.messages`` + - ``Page.entry_fulfillment.conditional_cases`` + - ``Page.event_handlers.trigger_fulfillment.messages`` + - ``Page.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`` + - + + ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`` + + - ``Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`` + - + + ``Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`` + + - ``Page.transition_routes.trigger_fulfillment.messages`` + - ``Page.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. 
+ page_size (int): + The maximum number of items to return in a + single page. By default 100 and at most 1000. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListPagesResponse(proto.Message): + r"""The response message for + [Pages.ListPages][google.cloud.dialogflow.cx.v3.Pages.ListPages]. + + Attributes: + pages (MutableSequence[google.cloud.dialogflowcx_v3.types.Page]): + The list of pages. There will be a maximum number of items + returned based on the page_size field in the request. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + pages: MutableSequence["Page"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Page", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPageRequest(proto.Message): + r"""The request message for + [Pages.GetPage][google.cloud.dialogflow.cx.v3.Pages.GetPage]. + + Attributes: + name (str): + Required. The name of the page. Format: + ``projects//locations//agents//flows//pages/``. + language_code (str): + The language to retrieve the page for. 
The following fields + are language dependent: + + - ``Page.entry_fulfillment.messages`` + - ``Page.entry_fulfillment.conditional_cases`` + - ``Page.event_handlers.trigger_fulfillment.messages`` + - ``Page.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`` + - + + ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`` + + - ``Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`` + - + + ``Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`` + + - ``Page.transition_routes.trigger_fulfillment.messages`` + - ``Page.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreatePageRequest(proto.Message): + r"""The request message for + [Pages.CreatePage][google.cloud.dialogflow.cx.v3.Pages.CreatePage]. + + Attributes: + parent (str): + Required. The flow to create a page for. Format: + ``projects//locations//agents//flows/``. + page (google.cloud.dialogflowcx_v3.types.Page): + Required. The page to create. 
+ language_code (str): + The language of the following fields in ``page``: + + - ``Page.entry_fulfillment.messages`` + - ``Page.entry_fulfillment.conditional_cases`` + - ``Page.event_handlers.trigger_fulfillment.messages`` + - ``Page.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`` + - + + ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`` + + - ``Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`` + - + + ``Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`` + + - ``Page.transition_routes.trigger_fulfillment.messages`` + - ``Page.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page: "Page" = proto.Field( + proto.MESSAGE, + number=2, + message="Page", + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdatePageRequest(proto.Message): + r"""The request message for + [Pages.UpdatePage][google.cloud.dialogflow.cx.v3.Pages.UpdatePage]. + + Attributes: + page (google.cloud.dialogflowcx_v3.types.Page): + Required. The page to update. 
+ language_code (str): + The language of the following fields in ``page``: + + - ``Page.entry_fulfillment.messages`` + - ``Page.entry_fulfillment.conditional_cases`` + - ``Page.event_handlers.trigger_fulfillment.messages`` + - ``Page.event_handlers.trigger_fulfillment.conditional_cases`` + - ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.messages`` + - + + ``Page.form.parameters.fill_behavior.initial_prompt_fulfillment.conditional_cases`` + + - ``Page.form.parameters.fill_behavior.reprompt_event_handlers.messages`` + - + + ``Page.form.parameters.fill_behavior.reprompt_event_handlers.conditional_cases`` + + - ``Page.transition_routes.trigger_fulfillment.messages`` + - ``Page.transition_routes.trigger_fulfillment.conditional_cases`` + + If not specified, the agent's default language is used. + `Many + languages `__ + are supported. Note: languages must be enabled in the agent + before they can be used. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The mask to control which fields get updated. + If the mask is not present, all fields will be + updated. + """ + + page: "Page" = proto.Field( + proto.MESSAGE, + number=1, + message="Page", + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class DeletePageRequest(proto.Message): + r"""The request message for + [Pages.DeletePage][google.cloud.dialogflow.cx.v3.Pages.DeletePage]. + + Attributes: + name (str): + Required. The name of the page to delete. Format: + ``projects//locations//agents//Flows//pages/``. + force (bool): + This field has no effect for pages with no incoming + transitions. For pages with incoming transitions: + + - If ``force`` is set to false, an error will be returned + with message indicating the incoming transitions. 
+ - If ``force`` is set to true, Dialogflow will remove the + page, as well as any transitions to the page (i.e. + [Target page][EventHandler.target_page] in event handlers + or [Target page][TransitionRoute.target_page] in + transition routes that point to this page will be + cleared). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class KnowledgeConnectorSettings(proto.Message): + r"""The Knowledge Connector settings for this page or flow. + This includes information such as the attached Knowledge Bases, + and the way to execute fulfillment. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Whether Knowledge Connector is enabled or + not. + trigger_fulfillment (google.cloud.dialogflowcx_v3.types.Fulfillment): + The fulfillment to be triggered. + + When the answers from the Knowledge Connector are selected + by Dialogflow, you can utitlize the request scoped parameter + ``$request.knowledge.answers`` (contains up to the 5 highest + confidence answers) and ``$request.knowledge.questions`` + (contains the corresponding questions) to construct the + fulfillment. + target_page (str): + The target page to transition to. Format: + ``projects//locations//agents//flows//pages/``. + + This field is a member of `oneof`_ ``target``. + target_flow (str): + The target flow to transition to. Format: + ``projects//locations//agents//flows/``. + + This field is a member of `oneof`_ ``target``. + data_store_connections (MutableSequence[google.cloud.dialogflowcx_v3.types.DataStoreConnection]): + Optional. List of related data store + connections. 
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + trigger_fulfillment: fulfillment.Fulfillment = proto.Field( + proto.MESSAGE, + number=3, + message=fulfillment.Fulfillment, + ) + target_page: str = proto.Field( + proto.STRING, + number=4, + oneof="target", + ) + target_flow: str = proto.Field( + proto.STRING, + number=5, + oneof="target", + ) + data_store_connections: MutableSequence[ + data_store_connection.DataStoreConnection + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data_store_connection.DataStoreConnection, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py new file mode 100644 index 000000000000..5e0d03046896 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/response_message.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "ResponseMessage", + }, +) + + +class ResponseMessage(proto.Message): + r"""Represents a response message that can be returned by a + conversational agent. + + Response messages are also used for output audio synthesis. The + approach is as follows: + + - If at least one OutputAudioText response is present, then all + OutputAudioText responses are linearly concatenated, and the + result is used for output audio synthesis. + - If the OutputAudioText responses are a mixture of text and SSML, + then the concatenated result is treated as SSML; otherwise, the + result is treated as either text or SSML as appropriate. The + agent designer should ideally use either text or SSML + consistently throughout the bot design. + - Otherwise, all Text responses are linearly concatenated, and the + result is used for output audio synthesis. + + This approach allows for more sophisticated user experience + scenarios, where the text displayed to the user may differ from what + is heard. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (google.cloud.dialogflowcx_v3.types.ResponseMessage.Text): + Returns a text response. + + This field is a member of `oneof`_ ``message``. + payload (google.protobuf.struct_pb2.Struct): + Returns a response containing a custom, + platform-specific payload. + + This field is a member of `oneof`_ ``message``. 
+ conversation_success (google.cloud.dialogflowcx_v3.types.ResponseMessage.ConversationSuccess): + Indicates that the conversation succeeded. + + This field is a member of `oneof`_ ``message``. + output_audio_text (google.cloud.dialogflowcx_v3.types.ResponseMessage.OutputAudioText): + A text or ssml response that is + preferentially used for TTS output audio + synthesis, as described in the comment on the + ResponseMessage message. + + This field is a member of `oneof`_ ``message``. + live_agent_handoff (google.cloud.dialogflowcx_v3.types.ResponseMessage.LiveAgentHandoff): + Hands off conversation to a human agent. + + This field is a member of `oneof`_ ``message``. + end_interaction (google.cloud.dialogflowcx_v3.types.ResponseMessage.EndInteraction): + Output only. A signal that indicates the interaction with + the Dialogflow agent has ended. This message is generated by + Dialogflow only when the conversation reaches + ``END_SESSION`` page. It is not supposed to be defined by + the user. + + It's guaranteed that there is at most one such message in + each response. + + This field is a member of `oneof`_ ``message``. + play_audio (google.cloud.dialogflowcx_v3.types.ResponseMessage.PlayAudio): + Signal that the client should play an audio clip hosted at a + client-specific URI. Dialogflow uses this to construct + [mixed_audio][google.cloud.dialogflow.cx.v3.ResponseMessage.mixed_audio]. + However, Dialogflow itself does not try to read or process + the URI in any way. + + This field is a member of `oneof`_ ``message``. + mixed_audio (google.cloud.dialogflowcx_v3.types.ResponseMessage.MixedAudio): + Output only. An audio response message composed of both the + synthesized Dialogflow agent responses and responses defined + via + [play_audio][google.cloud.dialogflow.cx.v3.ResponseMessage.play_audio]. + This message is generated by Dialogflow only and not + supposed to be defined by the user. + + This field is a member of `oneof`_ ``message``. 
+ telephony_transfer_call (google.cloud.dialogflowcx_v3.types.ResponseMessage.TelephonyTransferCall): + A signal that the client should transfer the + phone call connected to this agent to a + third-party endpoint. + + This field is a member of `oneof`_ ``message``. + knowledge_info_card (google.cloud.dialogflowcx_v3.types.ResponseMessage.KnowledgeInfoCard): + Represents info card for knowledge answers, + to be better rendered in Dialogflow Messenger. + + This field is a member of `oneof`_ ``message``. + response_type (google.cloud.dialogflowcx_v3.types.ResponseMessage.ResponseType): + Response type. + channel (str): + The channel which the response is associated with. Clients + can specify the channel via + [QueryParameters.channel][google.cloud.dialogflow.cx.v3.QueryParameters.channel], + and only associated channel response will be returned. + """ + + class ResponseType(proto.Enum): + r"""Represents different response types. + + Values: + RESPONSE_TYPE_UNSPECIFIED (0): + Not specified. + ENTRY_PROMPT (1): + The response is from an [entry + prompt][google.cloud.dialogflow.cx.v3.Page.entry_fulfillment] + in the page. + PARAMETER_PROMPT (2): + The response is from [form-filling + prompt][google.cloud.dialogflow.cx.v3.Form.Parameter.fill_behavior] + in the page. + HANDLER_PROMPT (3): + The response is from a [transition + route][google.cloud.dialogflow.cx.v3.TransitionRoute] or an + [event handler][EventHandler] in the page or flow or + transition route group. + """ + RESPONSE_TYPE_UNSPECIFIED = 0 + ENTRY_PROMPT = 1 + PARAMETER_PROMPT = 2 + HANDLER_PROMPT = 3 + + class Text(proto.Message): + r"""The text response message. + + Attributes: + text (MutableSequence[str]): + Required. A collection of text responses. + allow_playback_interruption (bool): + Output only. Whether the playback of this + message can be interrupted by the end user's + speech and the client can then starts the next + Dialogflow request. 
+ """ + + text: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + allow_playback_interruption: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class LiveAgentHandoff(proto.Message): + r"""Indicates that the conversation should be handed off to a live + agent. + + Dialogflow only uses this to determine which conversations were + handed off to a human agent for measurement purposes. What else to + do with this signal is up to you and your handoff procedures. + + You may set this, for example: + + - In the + [entry_fulfillment][google.cloud.dialogflow.cx.v3.Page.entry_fulfillment] + of a [Page][google.cloud.dialogflow.cx.v3.Page] if entering the + page indicates something went extremely wrong in the + conversation. + - In a webhook response when you determine that the customer issue + can only be handled by a human. + + Attributes: + metadata (google.protobuf.struct_pb2.Struct): + Custom metadata for your handoff procedure. + Dialogflow doesn't impose any structure on this. + """ + + metadata: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + class ConversationSuccess(proto.Message): + r"""Indicates that the conversation succeeded, i.e., the bot handled the + issue that the customer talked to it about. + + Dialogflow only uses this to determine which conversations should be + counted as successful and doesn't process the metadata in this + message in any way. Note that Dialogflow also considers + conversations that get to the conversation end page as successful + even if they don't return + [ConversationSuccess][google.cloud.dialogflow.cx.v3.ResponseMessage.ConversationSuccess]. + + You may set this, for example: + + - In the + [entry_fulfillment][google.cloud.dialogflow.cx.v3.Page.entry_fulfillment] + of a [Page][google.cloud.dialogflow.cx.v3.Page] if entering the + page indicates that the conversation succeeded. 
+ - In a webhook response when you determine that you handled the + customer issue. + + Attributes: + metadata (google.protobuf.struct_pb2.Struct): + Custom metadata. Dialogflow doesn't impose + any structure on this. + """ + + metadata: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + class OutputAudioText(proto.Message): + r"""A text or ssml response that is preferentially used for TTS + output audio synthesis, as described in the comment on the + ResponseMessage message. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + The raw text to be synthesized. + + This field is a member of `oneof`_ ``source``. + ssml (str): + The SSML text to be synthesized. For more information, see + `SSML `__. + + This field is a member of `oneof`_ ``source``. + allow_playback_interruption (bool): + Output only. Whether the playback of this + message can be interrupted by the end user's + speech and the client can then starts the next + Dialogflow request. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + oneof="source", + ) + ssml: str = proto.Field( + proto.STRING, + number=2, + oneof="source", + ) + allow_playback_interruption: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class EndInteraction(proto.Message): + r"""Indicates that interaction with the Dialogflow agent has + ended. This message is generated by Dialogflow only and not + supposed to be defined by the user. + + """ + + class PlayAudio(proto.Message): + r"""Specifies an audio clip to be played by the client as part of + the response. + + Attributes: + audio_uri (str): + Required. URI of the audio clip. 
Dialogflow + does not impose any validation on this value. It + is specific to the client that reads it. + allow_playback_interruption (bool): + Output only. Whether the playback of this + message can be interrupted by the end user's + speech and the client can then starts the next + Dialogflow request. + """ + + audio_uri: str = proto.Field( + proto.STRING, + number=1, + ) + allow_playback_interruption: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class MixedAudio(proto.Message): + r"""Represents an audio message that is composed of both segments + synthesized from the Dialogflow agent prompts and ones hosted + externally at the specified URIs. The external URIs are specified + via + [play_audio][google.cloud.dialogflow.cx.v3.ResponseMessage.play_audio]. + This message is generated by Dialogflow only and not supposed to be + defined by the user. + + Attributes: + segments (MutableSequence[google.cloud.dialogflowcx_v3.types.ResponseMessage.MixedAudio.Segment]): + Segments this audio response is composed of. + """ + + class Segment(proto.Message): + r"""Represents one segment of audio. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audio (bytes): + Raw audio synthesized from the Dialogflow + agent's response using the output config + specified in the request. + + This field is a member of `oneof`_ ``content``. + uri (str): + Client-specific URI that points to an audio + clip accessible to the client. Dialogflow does + not impose any validation on it. + + This field is a member of `oneof`_ ``content``. + allow_playback_interruption (bool): + Output only. 
Whether the playback of this + segment can be interrupted by the end user's + speech and the client should then start the next + Dialogflow request. + """ + + audio: bytes = proto.Field( + proto.BYTES, + number=1, + oneof="content", + ) + uri: str = proto.Field( + proto.STRING, + number=2, + oneof="content", + ) + allow_playback_interruption: bool = proto.Field( + proto.BOOL, + number=3, + ) + + segments: MutableSequence[ + "ResponseMessage.MixedAudio.Segment" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ResponseMessage.MixedAudio.Segment", + ) + + class TelephonyTransferCall(proto.Message): + r"""Represents the signal that telles the client to transfer the + phone call connected to the agent to a third-party endpoint. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + phone_number (str): + Transfer the call to a phone number in `E.164 + format `__. + + This field is a member of `oneof`_ ``endpoint``. + """ + + phone_number: str = proto.Field( + proto.STRING, + number=1, + oneof="endpoint", + ) + + class KnowledgeInfoCard(proto.Message): + r"""Represents info card response. If the response contains + generative knowledge prediction, Dialogflow will return a + payload with Infobot Messenger compatible info card. + + Otherwise, the info card response is skipped. 
+ + """ + + text: Text = proto.Field( + proto.MESSAGE, + number=1, + oneof="message", + message=Text, + ) + payload: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + oneof="message", + message=struct_pb2.Struct, + ) + conversation_success: ConversationSuccess = proto.Field( + proto.MESSAGE, + number=9, + oneof="message", + message=ConversationSuccess, + ) + output_audio_text: OutputAudioText = proto.Field( + proto.MESSAGE, + number=8, + oneof="message", + message=OutputAudioText, + ) + live_agent_handoff: LiveAgentHandoff = proto.Field( + proto.MESSAGE, + number=10, + oneof="message", + message=LiveAgentHandoff, + ) + end_interaction: EndInteraction = proto.Field( + proto.MESSAGE, + number=11, + oneof="message", + message=EndInteraction, + ) + play_audio: PlayAudio = proto.Field( + proto.MESSAGE, + number=12, + oneof="message", + message=PlayAudio, + ) + mixed_audio: MixedAudio = proto.Field( + proto.MESSAGE, + number=13, + oneof="message", + message=MixedAudio, + ) + telephony_transfer_call: TelephonyTransferCall = proto.Field( + proto.MESSAGE, + number=18, + oneof="message", + message=TelephonyTransferCall, + ) + knowledge_info_card: KnowledgeInfoCard = proto.Field( + proto.MESSAGE, + number=20, + oneof="message", + message=KnowledgeInfoCard, + ) + response_type: ResponseType = proto.Field( + proto.ENUM, + number=4, + enum=ResponseType, + ) + channel: str = proto.Field( + proto.STRING, + number=19, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/safety_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/safety_settings.py new file mode 100644 index 000000000000..94564929eb8d --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/safety_settings.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "SafetySettings", + }, +) + + +class SafetySettings(proto.Message): + r"""Settings for Generative Safety. + + Attributes: + banned_phrases (MutableSequence[google.cloud.dialogflowcx_v3.types.SafetySettings.Phrase]): + Banned phrases for generated text. + """ + + class Phrase(proto.Message): + r"""Text input which can be used for prompt or banned phrases. + + Attributes: + text (str): + Required. Text input which can be used for + prompt or banned phrases. + language_code (str): + Required. Language code of the phrase. 
+ """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + banned_phrases: MutableSequence[Phrase] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Phrase, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py new file mode 100644 index 000000000000..85914fbedb4a --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/security_settings.py @@ -0,0 +1,483 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "GetSecuritySettingsRequest", + "UpdateSecuritySettingsRequest", + "ListSecuritySettingsRequest", + "ListSecuritySettingsResponse", + "CreateSecuritySettingsRequest", + "DeleteSecuritySettingsRequest", + "SecuritySettings", + }, +) + + +class GetSecuritySettingsRequest(proto.Message): + r"""The request message for + [SecuritySettingsService.GetSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.GetSecuritySettings]. + + Attributes: + name (str): + Required. Resource name of the settings. Format: + ``projects//locations//securitySettings/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSecuritySettingsRequest(proto.Message): + r"""The request message for + [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.UpdateSecuritySettings]. + + Attributes: + security_settings (google.cloud.dialogflowcx_v3.types.SecuritySettings): + Required. [SecuritySettings] object that contains values for + each of the fields to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to control which fields + get updated. If the mask is not present, all + fields will be updated. + """ + + security_settings: "SecuritySettings" = proto.Field( + proto.MESSAGE, + number=1, + message="SecuritySettings", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListSecuritySettingsRequest(proto.Message): + r"""The request message for [SecuritySettings.ListSecuritySettings][]. + + Attributes: + parent (str): + Required. The location to list all security settings for. + Format: ``projects//locations/``. 
+ page_size (int): + The maximum number of items to return in a + single page. By default 20 and at most 100. + page_token (str): + The next_page_token value returned from a previous list + request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSecuritySettingsResponse(proto.Message): + r"""The response message for [SecuritySettings.ListSecuritySettings][]. + + Attributes: + security_settings (MutableSequence[google.cloud.dialogflowcx_v3.types.SecuritySettings]): + The list of security settings. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + security_settings: MutableSequence["SecuritySettings"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SecuritySettings", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateSecuritySettingsRequest(proto.Message): + r"""The request message for [SecuritySettings.CreateSecuritySettings][]. + + Attributes: + parent (str): + Required. The location to create an + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + for. Format: + ``projects//locations/``. + security_settings (google.cloud.dialogflowcx_v3.types.SecuritySettings): + Required. The security settings to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + security_settings: "SecuritySettings" = proto.Field( + proto.MESSAGE, + number=2, + message="SecuritySettings", + ) + + +class DeleteSecuritySettingsRequest(proto.Message): + r"""The request message for [SecuritySettings.DeleteSecuritySettings][]. + + Attributes: + name (str): + Required. The name of the + [SecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettings] + to delete. 
Format: + ``projects//locations//securitySettings/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SecuritySettings(proto.Message): + r"""Represents the settings related to security issues, such as + data redaction and data retention. It may take hours for updates + on the settings to propagate to all the related components and + take effect. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Resource name of the settings. Required for the + [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.UpdateSecuritySettings] + method. + [SecuritySettingsService.CreateSecuritySettings][google.cloud.dialogflow.cx.v3.SecuritySettingsService.CreateSecuritySettings] + populates the name automatically. Format: + ``projects//locations//securitySettings/``. + display_name (str): + Required. The human-readable name of the + security settings, unique within the location. + redaction_strategy (google.cloud.dialogflowcx_v3.types.SecuritySettings.RedactionStrategy): + Strategy that defines how we do redaction. + redaction_scope (google.cloud.dialogflowcx_v3.types.SecuritySettings.RedactionScope): + Defines the data for which Dialogflow applies + redaction. Dialogflow does not redact data that + it does not have access to – for example, Cloud + logging. + inspect_template (str): + `DLP `__ inspect template + name. Use this template to define inspect base settings. + + The ``DLP Inspect Templates Reader`` role is needed on the + Dialogflow service identity service account (has the form + ``service-PROJECT_NUMBER@gcp-sa-dialogflow.iam.gserviceaccount.com``) + for your agent's project. 
+ + If empty, we use the default DLP inspect config. + + The template name will have one of the following formats: + ``projects//locations//inspectTemplates/